From a57ae4ac6865a62865b4bdf6ba149744eab76e09 Mon Sep 17 00:00:00 2001 From: Alex Coleman <73913404+acoleman2000@users.noreply.github.com> Date: Mon, 21 Nov 2022 10:27:29 -0500 Subject: [PATCH 01/44] Create test_line_numbers.py --- schema_salad/tests/test_line_numbers.py | 56 +++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 schema_salad/tests/test_line_numbers.py diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py new file mode 100644 index 000000000..f64f5548c --- /dev/null +++ b/schema_salad/tests/test_line_numbers.py @@ -0,0 +1,56 @@ +#from parser import load_document_by_uri, save +from pathlib import Path +from schema_salad.utils import yaml_no_ts +from ruamel.yaml.comments import CommentedMap, CommentedSeq +from typing import Any, Dict, List, Optional, cast +import schema_salad.metaschema as cg_metaschema +from schema_salad import codegen +from schema_salad.avro.schema import Names +from schema_salad.fetcher import DefaultFetcher +from schema_salad.python_codegen import PythonCodeGen +from schema_salad.python_codegen_support import LoadingOptions +from schema_salad.schema import load_schema + + + +def check_structure(codegen_doc): + assert type(codegen_doc) == CommentedMap + + +def compare_comments(original_doc, codegen_doc): + return None + +def compare_line_numbers(original_doc, codegen_doc): + assert type(original_doc) == CommentedMap + assert type(codegen_doc) == CommentedMap + + assert original_doc.lc == codegen_doc.lc + + assert original_doc.lc.data == codegen_doc.lc.data + +def python_codegen( + file_uri: str, + target: Path, + parser_info: Optional[str] = None, + package: Optional[str] = None, +) -> None: + document_loader, avsc_names, schema_metadata, metaschema_loader = load_schema( + file_uri + ) + assert isinstance(avsc_names, Names) + schema_raw_doc = metaschema_loader.fetch(file_uri) + schema_doc, schema_metadata = metaschema_loader.resolve_all( + schema_raw_doc, file_uri + ) 
+ codegen.codegen( + "python", + cast(List[Dict[str, Any]], schema_doc), + schema_metadata, + document_loader, + target=str(target), + parser_info=parser_info, + package=package, + ) + +if __name__ == "__main__": + python_codegen('https://github.com/common-workflow-language/common-workflow-language/raw/codegen/v1.0/CommonWorkflowLanguage.yml', 'cwl_v1_0.py') From 4b23d8df5492ccd71ee9fd0c772d3e56ad90c89a Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 9 Jan 2023 09:12:18 -0700 Subject: [PATCH 02/44] adding tests --- schema_salad/tests/test_line_numbers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index f64f5548c..c949c5345 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -10,6 +10,7 @@ from schema_salad.python_codegen import PythonCodeGen from schema_salad.python_codegen_support import LoadingOptions from schema_salad.schema import load_schema +import os @@ -54,3 +55,4 @@ def python_codegen( if __name__ == "__main__": python_codegen('https://github.com/common-workflow-language/common-workflow-language/raw/codegen/v1.0/CommonWorkflowLanguage.yml', 'cwl_v1_0.py') + assert(os.path.exists('cwl_v1_0.py')) \ No newline at end of file From 2e584ac181eeab29681074c33da9af2840310120 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 9 Jan 2023 09:13:00 -0700 Subject: [PATCH 03/44] updating save method within python_codegen_support.py --- schema_salad/python_codegen_support.py | 41 ++++++++++++++++++-------- 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 4defed397..49026ee3c 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -27,7 +27,7 @@ from rdflib import Graph from rdflib.plugins.parsers.notation3 import BadSyntax -from ruamel.yaml.comments import CommentedMap +from 
ruamel.yaml.comments import CommentedMap, CommentedSeq from schema_salad.exceptions import SchemaSaladException, ValidationException from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher @@ -222,21 +222,38 @@ def save( top: bool = True, base_url: str = "", relative_uris: bool = True, + doc = None, ) -> save_type: if isinstance(val, Saveable): return val.save(top=top, base_url=base_url, relative_uris=relative_uris) if isinstance(val, MutableSequence): - return [ - save(v, top=False, base_url=base_url, relative_uris=relative_uris) - for v in val - ] + r = CommentedSeq() + for v in val: + if doc: + if v in doc: + r.lc.data.add_kv_line_col(v, doc.lc.data[v]) + r.append(save(v, top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) + return r + # return [ + # save(v, top=False, base_url=base_url, relative_uris=relative_uris) + # for v in val + # ] if isinstance(val, MutableMapping): - newdict = {} + newdict = CommentedMap() for key in val: + if doc: + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) newdict[key] = save( - val[key], top=False, base_url=base_url, relative_uris=relative_uris + val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc ) return newdict + # newdict = {} + # for key in val: + # newdict[key] = save( + # val[key], top=False, base_url=base_url, relative_uris=relative_uris + # ) + # return newdict if val is None or isinstance(val, (int, float, bool, str)): return val raise Exception("Not Saveable: %s" % type(val)) @@ -707,11 +724,11 @@ def _document_load( addl_metadata=addl_metadata, ) - doc = { - k: v - for k, v in doc.items() - if k not in ("$namespaces", "$schemas", "$base") - } + # doc = { + # k: v + # for k, v in doc.items() + # if k not in ("$namespaces", "$schemas", "$base") + # } if "$graph" in doc: loadingOptions.idx[baseuri] = ( From b7f22de2c4f2e47e232109257a8af80a0daf6554 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 9 Jan 2023 16:17:02 -0700 Subject: 
[PATCH 04/44] adding helper methods --- schema_salad/python_codegen_support.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 49026ee3c..f96ad7b42 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -216,6 +216,28 @@ def load_field(val, fieldtype, baseuri, loadingOptions): Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] +def get_line_numbers(doc: CommentedMap) -> dict: + line_numbers = {} + for key, value in doc.items(): + line_numbers[key] = {} + line_numbers[value] = {} + line_numbers[key]["line"] = doc.lc.data[key][0] + line_numbers[key]["col"] = doc.lc.data[key][1] + line_numbers[value]["line"] = doc.lc.data[key][2] + line_numbers[value]["col"] = doc.lc.data[key][3] + return line_numbers + +def get_max_line_num(doc: CommentedMap) -> int: + max_line = 0 + max_key = "" + cur = doc + while(type(cur) == CommentedMap): + for key in doc.keys(): + if doc.lc.data[key][2] > max_line: + max_line = doc.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line def save( val: Any, @@ -225,7 +247,7 @@ def save( doc = None, ) -> save_type: if isinstance(val, Saveable): - return val.save(top=top, base_url=base_url, relative_uris=relative_uris) + return val.save(top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc) if isinstance(val, MutableSequence): r = CommentedSeq() for v in val: From 21659d2470dc572211c1be35c26f77e2ac3e0cfb Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 9 Jan 2023 16:29:05 -0700 Subject: [PATCH 05/44] adding additional helper method --- schema_salad/python_codegen_support.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index f96ad7b42..f9045e84d 100644 --- a/schema_salad/python_codegen_support.py +++ 
b/schema_salad/python_codegen_support.py @@ -216,6 +216,22 @@ def load_field(val, fieldtype, baseuri, loadingOptions): Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] +def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict, key: str, val: Any, max_len: int, cols: dict)->int: + if key in line_numbers: + new_doc.lc.add_kv_line_col(key, old_doc.lc.info["key"]) + elif val in line_numbers: + line = line_numbers[val]["line"] + if line in cols: + col = max(line_numbers[val]["col"], cols[line]) + else: + col = line_numbers[val]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + cols[line] = col + len("id") + 2 + else: + new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) + max_len += 1 + return max_len + def get_line_numbers(doc: CommentedMap) -> dict: line_numbers = {} for key, value in doc.items(): From f68e74ab90665e0dc12526fcd8b3f402f88b850c Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 9 Jan 2023 16:47:16 -0700 Subject: [PATCH 06/44] fixing bugs in helper method --- schema_salad/python_codegen_support.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index f9045e84d..ccfb46bcc 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -215,7 +215,6 @@ def load_field(val, fieldtype, baseuri, loadingOptions): save_type = Optional[ Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] - def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict, key: str, val: Any, max_len: int, cols: dict)->int: if key in line_numbers: new_doc.lc.add_kv_line_col(key, old_doc.lc.info["key"]) @@ -231,16 +230,21 @@ def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict, key new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) max_len += 1 return 
max_len - + + + + def get_line_numbers(doc: CommentedMap) -> dict: line_numbers = {} for key, value in doc.items(): line_numbers[key] = {} - line_numbers[value] = {} + line_numbers[key]["line"] = doc.lc.data[key][0] line_numbers[key]["col"] = doc.lc.data[key][1] - line_numbers[value]["line"] = doc.lc.data[key][2] - line_numbers[value]["col"] = doc.lc.data[key][3] + if type(value) in [str, int]: + line_numbers[value] = {} + line_numbers[value]["line"] = doc.lc.data[key][2] + line_numbers[value]["col"] = doc.lc.data[key][3] return line_numbers def get_max_line_num(doc: CommentedMap) -> int: @@ -248,13 +252,14 @@ def get_max_line_num(doc: CommentedMap) -> int: max_key = "" cur = doc while(type(cur) == CommentedMap): - for key in doc.keys(): - if doc.lc.data[key][2] > max_line: - max_line = doc.lc.data[key][2] + for key in cur.keys(): + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] max_key = key cur = cur[max_key] return max_line + def save( val: Any, top: bool = True, From 2c26bfae00649f64c735dd1f4f5f520aa053b704 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 9 Jan 2023 20:47:07 -0700 Subject: [PATCH 07/44] updating python codegen_support.py and python_codegen.py --- schema_salad/python_codegen.py | 35 ++++++++++++++++++++++---- schema_salad/python_codegen_support.py | 8 +++--- 2 files changed, 34 insertions(+), 9 deletions(-) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index a2e0a50f6..a5ec3a122 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -170,6 +170,8 @@ def begin_class( self.out.write(" pass\n\n\n") return + field_names.append("_doc") + required_field_names = [f for f in field_names if f not in optional_fields] optional_field_names = [f for f in field_names if f in optional_fields] @@ -274,10 +276,16 @@ def fromDoc( self.serializer.write( """ def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, 
Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info = None + ) -> CommentedMap: + r = CommentedMap() + if (line_info != None and type(self._doc) == dict): + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -301,6 +309,15 @@ def save( self.serializer.write( """ r["class"] = "{class_}" + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="class", + val=r.get("class"), + max_len=max_len, + cols=cols, + ) """.format( class_=classname ) @@ -395,6 +412,7 @@ def type_loader( sub_names: List[str] = list( dict.fromkeys([self.type_loader(i).name for i in type_declaration]) ) + return self.declare_type( TypeDef( "union_of_{}".format("_or_".join(sub_names)), @@ -567,12 +585,15 @@ def declare_field( if self.{safename} is not None: u = save_relative_uri(self.{safename}, {baseurl}, {scoped_id}, {ref_scope}, relative_uris) r["{fieldname}"] = u + max_len = add_kv(old_doc = self._doc, new_doc = r, line_numbers = line_numbers, key = "{key_1}", val = r.get("{key_2}"), max_len = max_len, cols = cols) """.format( safename=self.safe_name(name), fieldname=shortname(name).strip(), baseurl=baseurl, scoped_id=fieldtype.scoped_id, ref_scope=fieldtype.ref_scope, + key_1=self.safe_name(name), + key_2=self.safe_name(name), ), 8, ) @@ -583,12 +604,16 @@ def declare_field( """ if self.{safename} is not None: r["{fieldname}"] = save( - self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris + self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris, doc=self._doc.get("{keyname}") ) + max_len = add_kv(old_doc = self._doc, new_doc = r, line_numbers = line_numbers, key = "{key_1}", val = 
r.get("{key_2}"), max_len = max_len, cols = cols) """.format( safename=self.safe_name(name), fieldname=shortname(name), baseurl=baseurl, + keyname=self.safe_name(name), + key_1=self.safe_name(name), + key_2=self.safe_name(name), ), 8, ) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index ccfb46bcc..8641cd582 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -215,9 +215,10 @@ def load_field(val, fieldtype, baseuri, loadingOptions): save_type = Optional[ Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] + def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict, key: str, val: Any, max_len: int, cols: dict)->int: if key in line_numbers: - new_doc.lc.add_kv_line_col(key, old_doc.lc.info["key"]) + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) elif val in line_numbers: line = line_numbers[val]["line"] if line in cols: @@ -231,9 +232,6 @@ def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict, key max_len += 1 return max_len - - - def get_line_numbers(doc: CommentedMap) -> dict: line_numbers = {} for key, value in doc.items(): @@ -275,6 +273,8 @@ def save( if doc: if v in doc: r.lc.data.add_kv_line_col(v, doc.lc.data[v]) + if len(r) == 1: + return r[0] r.append(save(v, top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) return r # return [ From e0ee3f4d44d953f40e571d4502792af093aa7f47 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Wed, 11 Jan 2023 13:48:22 -0700 Subject: [PATCH 08/44] updating files --- schema_salad/python_codegen.py | 31 ++++------ schema_salad/python_codegen_support.py | 82 +++++++++++++++++-------- schema_salad/tests/test_line_numbers.py | 43 +++++++------ 3 files changed, 94 insertions(+), 62 deletions(-) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index a5ec3a122..2a8abc8b7 100644 --- a/schema_salad/python_codegen.py +++ 
b/schema_salad/python_codegen.py @@ -276,16 +276,16 @@ def fromDoc( self.serializer.write( """ def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None ) -> CommentedMap: r = CommentedMap() - if (line_info != None and type(self._doc) == dict): + if line_info is not None: self._doc = line_info if (type(self._doc) == CommentedMap): r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) - cols = {} + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -309,15 +309,7 @@ def save( self.serializer.write( """ r["class"] = "{class_}" - max_len = add_kv( - old_doc=self._doc, - new_doc=r, - line_numbers=line_numbers, - key="class", - val=r.get("class"), - max_len=max_len, - cols=cols, - ) + max_len = add_kv(old_doc=self._doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), max_len=max_len, cols=cols) """.format( class_=classname ) @@ -603,17 +595,20 @@ def declare_field( fmt( """ if self.{safename} is not None: - r["{fieldname}"] = save( - self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris, doc=self._doc.get("{keyname}") + saved_val = save( + self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris, doc=self._doc.get("{fieldname}") ) - max_len = add_kv(old_doc = self._doc, new_doc = r, line_numbers = line_numbers, key = "{key_1}", val = r.get("{key_2}"), max_len = max_len, cols = cols) + + if type(saved_val) == list: + if len(saved_val) == 1: # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["{fieldname}"] = saved_val + + max_len = add_kv(old_doc = self._doc, new_doc = r, line_numbers = line_numbers, key = 
"{fieldname}", val = r.get("{fieldname}"), max_len = max_len, cols = cols) """.format( safename=self.safe_name(name), fieldname=shortname(name), baseurl=baseurl, - keyname=self.safe_name(name), - key_1=self.safe_name(name), - key_2=self.safe_name(name), ), 8, ) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 8641cd582..4df8ef290 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -186,8 +186,8 @@ def fromDoc( @abstractmethod def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -216,46 +216,65 @@ def load_field(val, fieldtype, baseuri, loadingOptions): Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] -def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict, key: str, val: Any, max_len: int, cols: dict)->int: - if key in line_numbers: +def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict[Any,dict[str,int]], key: str, val: Any, max_len: int, cols: dict[int,int])->int: + """Add key value pair into Commented Map. + + Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap, + key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. 
+ """ + if key in line_numbers: # If the key to insert is in the original CommentedMap new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) - elif val in line_numbers: - line = line_numbers[val]["line"] - if line in cols: - col = max(line_numbers[val]["col"], cols[line]) - else: - col = line_numbers[val]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - cols[line] = col + len("id") + 2 - else: + elif isinstance(val, (int, float, bool, str)): # If the value is hashable + if val in line_numbers: # If the value is in the original CommentedMap + line = line_numbers[val]["line"] + if line in cols: + col = max(line_numbers[val]["col"], cols[line]) + else: + col = line_numbers[val]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + cols[line] = col + len("id") + 2 + else: # If neither the key or value is in the original CommentedMap (or value is not hashable) new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) max_len += 1 return max_len -def get_line_numbers(doc: CommentedMap) -> dict: - line_numbers = {} + +def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: + """Get line numbers for kv pairs in CommentedMap. + + For each key/value pair in a CommentedMap, save the line/col info into a dictionary, + only save value info if value is hashable. + """ + line_numbers: Dict[Any, dict[str,int]] = {} + if type(doc) == dict: + return {} for key, value in doc.items(): line_numbers[key] = {} line_numbers[key]["line"] = doc.lc.data[key][0] line_numbers[key]["col"] = doc.lc.data[key][1] - if type(value) in [str, int]: + if isinstance(value, (int, float, bool, str)): line_numbers[value] = {} line_numbers[value]["line"] = doc.lc.data[key][2] line_numbers[value]["col"] = doc.lc.data[key][3] return line_numbers + def get_max_line_num(doc: CommentedMap) -> int: + """Get the max line number for a CommentedMap. 
+ + Iterate through the the key with the highest line number until you reach a non-CommentedMap value or empty CommentedMap. + """ max_line = 0 max_key = "" cur = doc - while(type(cur) == CommentedMap): + while type(cur) == CommentedMap and len(cur) > 0: for key in cur.keys(): if cur.lc.data[key][2] >= max_line: - max_line = cur.lc.data[key][2] + max_line = cur.lc.data[key][2] max_key = key cur = cur[max_key] - return max_line + return max_line + 1 def save( @@ -263,18 +282,21 @@ def save( top: bool = True, base_url: str = "", relative_uris: bool = True, - doc = None, + doc: Optional[CommentedMap] = None, ) -> save_type: + """Save a val of any type. + + Recursively calls save method from class if val is of type Saveable. Otherwise, saves val to CommentedMap or CommentedSeq + """ if isinstance(val, Saveable): return val.save(top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc) if isinstance(val, MutableSequence): r = CommentedSeq() for v in val: if doc: - if v in doc: - r.lc.data.add_kv_line_col(v, doc.lc.data[v]) - if len(r) == 1: - return r[0] + if isinstance(v,(int, float, bool, str)): + if v in doc: + r.lc.data.add_kv_line_col(v, doc.lc.data[v]) r.append(save(v, top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) return r # return [ @@ -285,8 +307,9 @@ def save( newdict = CommentedMap() for key in val: if doc: - if key in doc: - newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + if isinstance(key, (int, float, bool, str)): + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) newdict[key] = save( val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc ) @@ -773,6 +796,13 @@ def _document_load( # if k not in ("$namespaces", "$schemas", "$base") # } + if "$namespaces" in doc: + doc.pop("$namespaces") + if "$schemas" in doc: + doc.pop("$schemas") + if "$base" in doc: + doc.pop("$base") + if "$graph" in doc: loadingOptions.idx[baseuri] = ( loader.load(doc["$graph"], baseuri, loadingOptions), 
diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index c949c5345..e2fa3fc61 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -3,31 +3,42 @@ from schema_salad.utils import yaml_no_ts from ruamel.yaml.comments import CommentedMap, CommentedSeq from typing import Any, Dict, List, Optional, cast -import schema_salad.metaschema as cg_metaschema from schema_salad import codegen from schema_salad.avro.schema import Names -from schema_salad.fetcher import DefaultFetcher -from schema_salad.python_codegen import PythonCodeGen -from schema_salad.python_codegen_support import LoadingOptions from schema_salad.schema import load_schema -import os -def check_structure(codegen_doc): +def compare_line_numbers(original_doc:CommentedMap, codegen_doc:CommentedMap)->None: + assert type(original_doc) == CommentedMap assert type(codegen_doc) == CommentedMap + assert original_doc.lc.line == codegen_doc.lc.line + assert original_doc.lc.col == codegen_doc.lc.col -def compare_comments(original_doc, codegen_doc): - return None + for key, lc_info in original_doc.lc.data.items(): + assert key in codegen_doc.lc.data + assert lc_info==codegen_doc.lc.data[key] -def compare_line_numbers(original_doc, codegen_doc): - assert type(original_doc) == CommentedMap - assert type(codegen_doc) == CommentedMap + max_line = get_max_line_number(original_doc) - assert original_doc.lc == codegen_doc.lc + for key, lc_info in codegen_doc.lc.data.items(): + if key in original_doc: + continue + assert lc_info == [max_line, 0, max_line, len(key) + 2] + max_line += 1 - assert original_doc.lc.data == codegen_doc.lc.data +def get_max_line_number(original_doc:CommentedMap)->int: + max_key = "" + max_line = 0 + temp_doc = original_doc + while (type(temp_doc) == CommentedMap) and len(temp_doc) > 0: + for key, lc_info in temp_doc.lc.data.items(): + if lc_info[0] >= max_line: + max_line = lc_info[0] + max_key = key + temp_doc = 
temp_doc[max_key] + return max_line + 1 def python_codegen( file_uri: str, @@ -50,9 +61,5 @@ def python_codegen( document_loader, target=str(target), parser_info=parser_info, - package=package, + package=package ) - -if __name__ == "__main__": - python_codegen('https://github.com/common-workflow-language/common-workflow-language/raw/codegen/v1.0/CommonWorkflowLanguage.yml', 'cwl_v1_0.py') - assert(os.path.exists('cwl_v1_0.py')) \ No newline at end of file From 8b31a59084c09fda897feb8871cd3c73ca02a876 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Wed, 11 Jan 2023 21:16:01 -0700 Subject: [PATCH 09/44] updating test --- schema_salad/tests/test_line_numbers.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index e2fa3fc61..9a4c3f33e 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -7,7 +7,10 @@ from schema_salad.avro.schema import Names from schema_salad.schema import load_schema - +def test_codegen()->None: + compare_line_numbers() + compare_line_numbers() + compare_line_numbers() def compare_line_numbers(original_doc:CommentedMap, codegen_doc:CommentedMap)->None: assert type(original_doc) == CommentedMap From 6d8bce99b805523e5ea7ebd9da7b0ef5b23a0c96 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Fri, 13 Jan 2023 08:29:30 -0700 Subject: [PATCH 10/44] updating files and adding test cwl files --- schema_salad/python_codegen_support.py | 4 +-- schema_salad/tests/count-lines6-wf_v1_0.cwl | 26 ++++++++++++++++++++ schema_salad/tests/count-lines6-wf_v1_1.cwl | 26 ++++++++++++++++++++ schema_salad/tests/count-lines6-wf_v1_2.cwl | 27 +++++++++++++++++++++ schema_salad/tests/test_line_numbers.py | 6 +---- schema_salad/tests/wc3-tool_v1_0.cwl | 25 +++++++++++++++++++ schema_salad/tests/wc3-tool_v1_1.cwl | 24 ++++++++++++++++++ schema_salad/tests/wc3-tool_v1_2.cwl | 24 ++++++++++++++++++ 8 files changed, 155 
insertions(+), 7 deletions(-) create mode 100644 schema_salad/tests/count-lines6-wf_v1_0.cwl create mode 100644 schema_salad/tests/count-lines6-wf_v1_1.cwl create mode 100644 schema_salad/tests/count-lines6-wf_v1_2.cwl create mode 100644 schema_salad/tests/wc3-tool_v1_0.cwl create mode 100644 schema_salad/tests/wc3-tool_v1_1.cwl create mode 100644 schema_salad/tests/wc3-tool_v1_2.cwl diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 4df8ef290..3fc7e738e 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -248,7 +248,7 @@ def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: line_numbers: Dict[Any, dict[str,int]] = {} if type(doc) == dict: return {} - for key, value in doc.items(): + for key, value in doc.lc.data.items(): line_numbers[key] = {} line_numbers[key]["line"] = doc.lc.data[key][0] @@ -269,7 +269,7 @@ def get_max_line_num(doc: CommentedMap) -> int: max_key = "" cur = doc while type(cur) == CommentedMap and len(cur) > 0: - for key in cur.keys(): + for key in cur.lc.data.keys(): if cur.lc.data[key][2] >= max_line: max_line = cur.lc.data[key][2] max_key = key diff --git a/schema_salad/tests/count-lines6-wf_v1_0.cwl b/schema_salad/tests/count-lines6-wf_v1_0.cwl new file mode 100644 index 000000000..88db53f1b --- /dev/null +++ b/schema_salad/tests/count-lines6-wf_v1_0.cwl @@ -0,0 +1,26 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.0 + +requirements: + - class: ScatterFeatureRequirement + - class: MultipleInputFeatureRequirement + +inputs: + file1: File[] + file2: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_0.cwl + scatter: file1 + in: + file1: + source: [file1, file2] + linkMerge: merge_nested + out: [output] \ No newline at end of file diff --git a/schema_salad/tests/count-lines6-wf_v1_1.cwl b/schema_salad/tests/count-lines6-wf_v1_1.cwl new file mode 100644 index 
000000000..b61cc4453 --- /dev/null +++ b/schema_salad/tests/count-lines6-wf_v1_1.cwl @@ -0,0 +1,26 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.1 + +requirements: + - class: ScatterFeatureRequirement + - class: MultipleInputFeatureRequirement + +inputs: + file1: File[] + file2: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_1.cwl + scatter: file1 + in: + file1: + source: [file1, file2] + linkMerge: merge_nested + out: [output] \ No newline at end of file diff --git a/schema_salad/tests/count-lines6-wf_v1_2.cwl b/schema_salad/tests/count-lines6-wf_v1_2.cwl new file mode 100644 index 000000000..398fe8902 --- /dev/null +++ b/schema_salad/tests/count-lines6-wf_v1_2.cwl @@ -0,0 +1,27 @@ + +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.2 + +requirements: + - class: ScatterFeatureRequirement + - class: MultipleInputFeatureRequirement + +inputs: + file1: File[] + file2: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_2.cwl + scatter: file1 + in: + file1: + source: [file1, file2] + linkMerge: merge_nested + out: [output] \ No newline at end of file diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 9a4c3f33e..8a2a59650 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -7,10 +7,6 @@ from schema_salad.avro.schema import Names from schema_salad.schema import load_schema -def test_codegen()->None: - compare_line_numbers() - compare_line_numbers() - compare_line_numbers() def compare_line_numbers(original_doc:CommentedMap, codegen_doc:CommentedMap)->None: assert type(original_doc) == CommentedMap @@ -65,4 +61,4 @@ def python_codegen( target=str(target), parser_info=parser_info, package=package - ) + ) \ No newline at end of file diff --git a/schema_salad/tests/wc3-tool_v1_0.cwl b/schema_salad/tests/wc3-tool_v1_0.cwl 
new file mode 100644 index 000000000..a213192cb --- /dev/null +++ b/schema_salad/tests/wc3-tool_v1_0.cwl @@ -0,0 +1,25 @@ +class: CommandLineTool +cwlVersion: v1.0 + +requirements: + - class: InlineJavascriptRequirement +hints: + ResourceRequirement: + ramMin: 8 + +inputs: + file1: + type: File[] + inputBinding: {} +outputs: + output: + type: int + outputBinding: + glob: output.txt + loadContents: true + outputEval: | + ${ + var s = self[0].contents.split(/\r?\n/); + return parseInt(s[s.length-2]); + } +stdout: output.txt \ No newline at end of file diff --git a/schema_salad/tests/wc3-tool_v1_1.cwl b/schema_salad/tests/wc3-tool_v1_1.cwl new file mode 100644 index 000000000..30d632de4 --- /dev/null +++ b/schema_salad/tests/wc3-tool_v1_1.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: CommandLineTool +cwlVersion: v1.1 + +requirements: + - class: InlineJavascriptRequirement + +inputs: + file1: + type: File[] + inputBinding: {} +outputs: + output: + type: int + outputBinding: + glob: output.txt + loadContents: true + outputEval: | + ${ + var s = self[0].contents.split(/\r?\n/); + return parseInt(s[s.length-2]); + } +stdout: output.txt +baseCommand: wc \ No newline at end of file diff --git a/schema_salad/tests/wc3-tool_v1_2.cwl b/schema_salad/tests/wc3-tool_v1_2.cwl new file mode 100644 index 000000000..23df81489 --- /dev/null +++ b/schema_salad/tests/wc3-tool_v1_2.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: CommandLineTool +cwlVersion: v1.2 + +requirements: + - class: InlineJavascriptRequirement + +inputs: + file1: + type: File[] + inputBinding: {} +outputs: + output: + type: int + outputBinding: + glob: output.txt + loadContents: true + outputEval: | + ${ + var s = self[0].contents.split(/\r?\n/); + return parseInt(s[s.length-2]); + } +stdout: output.txt +baseCommand: wc \ No newline at end of file From 314359510fefe4c44d34518421afc0cefbb1816f Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Tue, 17 Jan 2023 10:11:07 -0700 Subject: [PATCH 11/44] 
Fixing bug with updating sub-docs and non-kv values getting added to returned doc --- schema_salad/python_codegen_support.py | 28 +++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 3fc7e738e..8d940474a 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -233,6 +233,9 @@ def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict[Any, col = line_numbers[val]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) cols[line] = col + len("id") + 2 + else: # If neither the key or value is in the original CommentedMap (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) + max_len += 1 else: # If neither the key or value is in the original CommentedMap (or value is not hashable) new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) max_len += 1 @@ -292,12 +295,14 @@ def save( return val.save(top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc) if isinstance(val, MutableSequence): r = CommentedSeq() - for v in val: + for i in range(0, len(val)): if doc: - if isinstance(v,(int, float, bool, str)): - if v in doc: + if isinstance(val[i],(int, float, bool, str)): + if val[i] in doc: r.lc.data.add_kv_line_col(v, doc.lc.data[v]) - r.append(save(v, top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) + r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc[i])) + else: + r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) return r # return [ # save(v, top=False, base_url=base_url, relative_uris=relative_uris) @@ -310,9 +315,18 @@ def save( if isinstance(key, (int, float, bool, str)): if key in doc: newdict.lc.add_kv_line_col(key, doc.lc.data[key]) - newdict[key] = save( - val[key], top=False, 
base_url=base_url, relative_uris=relative_uris, doc=doc - ) + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc.get(key) + ) + else: + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc + ) + + else: + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc + ) return newdict # newdict = {} # for key in val: From 3b8080135488d78d9fa7beaab8851f1d07d79dc1 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Tue, 17 Jan 2023 10:19:19 -0700 Subject: [PATCH 12/44] updating CommentedSeq lc update --- schema_salad/python_codegen_support.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index a87e5cfb1..e722b14b9 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -301,8 +301,8 @@ def save( for i in range(0, len(val)): if doc: if isinstance(val[i],(int, float, bool, str)): - if val[i] in doc: - r.lc.data.add_kv_line_col(v, doc.lc.data[v]) + if i in doc: + r.lc.data.add_kv_line_col(i, doc.lc.data[i]) r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc[i])) else: r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) From 8152f0c732bb01a71c0afb8b222e173c5b87b421 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Tue, 17 Jan 2023 10:25:47 -0700 Subject: [PATCH 13/44] updating CommentedSeq lc data --- schema_salad/python_codegen_support.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index e722b14b9..694f60134 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -298,11 +298,11 @@ def save( return val.save(top=top, base_url=base_url, relative_uris=relative_uris, 
line_info=doc) if isinstance(val, MutableSequence): r = CommentedSeq() + r.lc.data = {} for i in range(0, len(val)): if doc: - if isinstance(val[i],(int, float, bool, str)): - if i in doc: - r.lc.data.add_kv_line_col(i, doc.lc.data[i]) + if i in doc.lc.data: + r.lc.data[i] = doc.lc.data[i] r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc[i])) else: r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) From 6ec87302ba93c2bddf7ec61c79351bbb9cf2671e Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Tue, 17 Jan 2023 10:28:42 -0700 Subject: [PATCH 14/44] Updating metaschema.py --- schema_salad/metaschema.py | 1435 ++++++++++++++++++++++++++++++++---- 1 file changed, 1293 insertions(+), 142 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 177ffcb32..1f9e8e290 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -30,7 +30,7 @@ from rdflib import Graph from rdflib.plugins.parsers.notation3 import BadSyntax -from ruamel.yaml.comments import CommentedMap +from ruamel.yaml.comments import CommentedMap, CommentedSeq from schema_salad.exceptions import SchemaSaladException, ValidationException from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher @@ -192,8 +192,8 @@ def fromDoc( @abstractmethod def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -222,27 +222,124 @@ def load_field(val, fieldtype, baseuri, loadingOptions): Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] +def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict[Any,dict[str,int]], key: str, val: Any, max_len: int, cols: dict[int,int])->int: + """Add 
key value pair into Commented Map. + + Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap, + key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. + """ + if key in line_numbers: # If the key to insert is in the original CommentedMap + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) + elif isinstance(val, (int, float, bool, str)): # If the value is hashable + if val in line_numbers: # If the value is in the original CommentedMap + line = line_numbers[val]["line"] + if line in cols: + col = max(line_numbers[val]["col"], cols[line]) + else: + col = line_numbers[val]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + cols[line] = col + len("id") + 2 + else: # If neither the key or value is in the original CommentedMap (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) + max_len += 1 + else: # If neither the key or value is in the original CommentedMap (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) + max_len += 1 + return max_len + + +def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: + """Get line numbers for kv pairs in CommentedMap. + + For each key/value pair in a CommentedMap, save the line/col info into a dictionary, + only save value info if value is hashable. 
+ """ + line_numbers: Dict[Any, dict[str,int]] = {} + if type(doc) == dict: + return {} + for key, value in doc.lc.data.items(): + line_numbers[key] = {} + + line_numbers[key]["line"] = doc.lc.data[key][0] + line_numbers[key]["col"] = doc.lc.data[key][1] + if isinstance(value, (int, float, bool, str)): + line_numbers[value] = {} + line_numbers[value]["line"] = doc.lc.data[key][2] + line_numbers[value]["col"] = doc.lc.data[key][3] + return line_numbers + + +def get_max_line_num(doc: CommentedMap) -> int: + """Get the max line number for a CommentedMap. + + Iterate through the the key with the highest line number until you reach a non-CommentedMap value or empty CommentedMap. + """ + max_line = 0 + max_key = "" + cur = doc + while type(cur) == CommentedMap and len(cur) > 0: + for key in cur.lc.data.keys(): + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line + 1 + def save( val: Any, top: bool = True, base_url: str = "", relative_uris: bool = True, + doc: Optional[CommentedMap] = None, ) -> save_type: + """Save a val of any type. + + Recursively calls save method from class if val is of type Saveable. 
Otherwise, saves val to CommentedMap or CommentedSeq + """ if isinstance(val, Saveable): - return val.save(top=top, base_url=base_url, relative_uris=relative_uris) + return val.save(top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc) if isinstance(val, MutableSequence): - return [ - save(v, top=False, base_url=base_url, relative_uris=relative_uris) - for v in val - ] + r = CommentedSeq() + r.lc.data = {} + for i in range(0, len(val)): + if doc: + if i in doc.lc.data: + r.lc.data[i] = doc.lc.data[i] + r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc[i])) + else: + r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) + return r + # return [ + # save(v, top=False, base_url=base_url, relative_uris=relative_uris) + # for v in val + # ] if isinstance(val, MutableMapping): - newdict = {} + newdict = CommentedMap() for key in val: - newdict[key] = save( - val[key], top=False, base_url=base_url, relative_uris=relative_uris - ) + if doc: + if isinstance(key, (int, float, bool, str)): + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc.get(key) + ) + else: + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc + ) + + else: + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc + ) return newdict + # newdict = {} + # for key in val: + # newdict[key] = save( + # val[key], top=False, base_url=base_url, relative_uris=relative_uris + # ) + # return newdict if val is None or isinstance(val, (int, float, bool, str)): return val raise Exception("Not Saveable: %s" % type(val)) @@ -713,11 +810,18 @@ def _document_load( addl_metadata=addl_metadata, ) - doc = { - k: v - for k, v in doc.items() - if k not in ("$namespaces", "$schemas", "$base") - } + # doc = { + # k: v + # for k, v 
in doc.items() + # if k not in ("$namespaces", "$schemas", "$base") + # } + + if "$namespaces" in doc: + doc.pop("$namespaces") + if "$schemas" in doc: + doc.pop("$schemas") + if "$base" in doc: + doc.pop("$base") if "$graph" in doc: loadingOptions.idx[baseuri] = ( @@ -877,6 +981,7 @@ def __init__( self, name: Any, type: Any, + _doc: Any, doc: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, @@ -893,6 +998,7 @@ def __init__( self.doc = doc self.name = name self.type = type + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): @@ -900,11 +1006,12 @@ def __eq__(self, other: Any) -> bool: self.doc == other.doc and self.name == other.name and self.type == other.type + and self._doc == other._doc ) return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type)) + return hash((self.doc, self.name, self.type, self._doc)) @classmethod def fromDoc( @@ -990,7 +1097,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -1004,6 +1111,7 @@ def fromDoc( doc=doc, name=name, type=type, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -1011,10 +1119,16 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, 
int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -1024,13 +1138,64 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + max_len=max_len, + cols=cols, + ) if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("doc"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["doc"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + max_len=max_len, + cols=cols, ) if self.type is not None: - r["type"] = save( - self.type, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + max_len=max_len, + cols=cols, ) # top refers to the directory level @@ -1041,13 +1206,14 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type"]) + attrs = frozenset(["doc", "name", "type", "_doc"]) class RecordSchema(Saveable): def __init__( self, type: Any, + _doc: Any, fields: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, 
loadingOptions: Optional[LoadingOptions] = None, @@ -1063,14 +1229,19 @@ def __init__( self.loadingOptions = LoadingOptions() self.fields = fields self.type = type + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, RecordSchema): - return bool(self.fields == other.fields and self.type == other.type) + return bool( + self.fields == other.fields + and self.type == other.type + and self._doc == other._doc + ) return False def __hash__(self) -> int: - return hash((self.fields, self.type)) + return hash((self.fields, self.type, self._doc)) @classmethod def fromDoc( @@ -1129,7 +1300,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -1142,16 +1313,23 @@ def fromDoc( _constructed = cls( fields=fields, type=type, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -1159,12 +1337,54 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=base_url, relative_uris=relative_uris + saved_val = save( + self.fields, + top=False, + 
base_url=base_url, + relative_uris=relative_uris, + doc=self._doc.get("fields"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["fields"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + max_len=max_len, + cols=cols, ) if self.type is not None: - r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + saved_val = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=self._doc.get("type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + max_len=max_len, + cols=cols, ) # top refers to the directory level @@ -1175,7 +1395,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type"]) + attrs = frozenset(["fields", "type", "_doc"]) class EnumSchema(Saveable): @@ -1188,6 +1408,7 @@ def __init__( self, symbols: Any, type: Any, + _doc: Any, name: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, @@ -1204,6 +1425,7 @@ def __init__( self.name = name self.symbols = symbols self.type = type + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, EnumSchema): @@ -1211,11 +1433,12 @@ def __eq__(self, other: Any) -> bool: self.name == other.name and self.symbols == other.symbols and self.type == other.type + and self._doc == other._doc ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type)) + return hash((self.name, self.symbols, self.type, self._doc)) 
@classmethod def fromDoc( @@ -1298,7 +1521,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -1312,6 +1535,7 @@ def fromDoc( name=name, symbols=symbols, type=type, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -1319,10 +1543,16 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -1332,12 +1562,51 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + max_len=max_len, + cols=cols, + ) if self.symbols is not None: u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) r["symbols"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + max_len=max_len, + cols=cols, + ) if self.type is not None: - r["type"] = save( - self.type, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris, + 
doc=self._doc.get("type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + max_len=max_len, + cols=cols, ) # top refers to the directory level @@ -1348,7 +1617,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type"]) + attrs = frozenset(["name", "symbols", "type", "_doc"]) class ArraySchema(Saveable): @@ -1356,6 +1625,7 @@ def __init__( self, items: Any, type: Any, + _doc: Any, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: @@ -1370,14 +1640,19 @@ def __init__( self.loadingOptions = LoadingOptions() self.items = items self.type = type + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, ArraySchema): - return bool(self.items == other.items and self.type == other.type) + return bool( + self.items == other.items + and self.type == other.type + and self._doc == other._doc + ) return False def __hash__(self) -> int: - return hash((self.items, self.type)) + return hash((self.items, self.type, self._doc)) @classmethod def fromDoc( @@ -1433,7 +1708,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( + "invalid field `{}`, expected one of: `items`, `type`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -1446,16 +1721,23 @@ def fromDoc( _constructed = cls( items=items, type=type, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, 
line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -1465,9 +1747,39 @@ def save( if self.items is not None: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) r["items"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + max_len=max_len, + cols=cols, + ) if self.type is not None: - r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + saved_val = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=self._doc.get("type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + max_len=max_len, + cols=cols, ) # top refers to the directory level @@ -1478,7 +1790,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type"]) + attrs = frozenset(["items", "type", "_doc"]) class JsonldPredicate(Saveable): @@ -1490,6 +1802,7 @@ class JsonldPredicate(Saveable): def __init__( self, + _doc: Any, _id: Optional[Any] = None, _type: Optional[Any] = None, _container: Optional[Any] = None, @@ -1524,6 +1837,7 @@ def __init__( self.typeDSL = typeDSL self.secondaryFilesDSL = secondaryFilesDSL self.subscope = subscope + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, 
JsonldPredicate): @@ -1539,6 +1853,7 @@ def __eq__(self, other: Any) -> bool: and self.typeDSL == other.typeDSL and self.secondaryFilesDSL == other.secondaryFilesDSL and self.subscope == other.subscope + and self._doc == other._doc ) return False @@ -1556,6 +1871,7 @@ def __hash__(self) -> int: self.typeDSL, self.secondaryFilesDSL, self.subscope, + self._doc, ) ) @@ -1781,7 +2097,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `_id`, `_type`, `_container`, `identity`, `noLinkCheck`, `mapSubject`, `mapPredicate`, `refScope`, `typeDSL`, `secondaryFilesDSL`, `subscope`".format( + "invalid field `{}`, expected one of: `_id`, `_type`, `_container`, `identity`, `noLinkCheck`, `mapSubject`, `mapPredicate`, `refScope`, `typeDSL`, `secondaryFilesDSL`, `subscope`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -1803,16 +2119,23 @@ def fromDoc( typeDSL=typeDSL, secondaryFilesDSL=secondaryFilesDSL, subscope=subscope, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -1822,60 +2145,264 @@ def save( if self._id is not None: u = save_relative_uri(self._id, base_url, True, None, relative_uris) r["_id"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="_id", + 
val=r.get("_id"), + max_len=max_len, + cols=cols, + ) if self._type is not None: - r["_type"] = save( - self._type, top=False, base_url=base_url, relative_uris=relative_uris + saved_val = save( + self._type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=self._doc.get("_type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["_type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="_type", + val=r.get("_type"), + max_len=max_len, + cols=cols, ) if self._container is not None: - r["_container"] = save( + saved_val = save( self._container, top=False, base_url=base_url, relative_uris=relative_uris, + doc=self._doc.get("_container"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["_container"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="_container", + val=r.get("_container"), + max_len=max_len, + cols=cols, ) if self.identity is not None: - r["identity"] = save( - self.identity, top=False, base_url=base_url, relative_uris=relative_uris + saved_val = save( + self.identity, + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=self._doc.get("identity"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["identity"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="identity", + val=r.get("identity"), + max_len=max_len, + cols=cols, ) if self.noLinkCheck is not None: - r["noLinkCheck"] = save( + saved_val = save( self.noLinkCheck, top=False, base_url=base_url, 
relative_uris=relative_uris, + doc=self._doc.get("noLinkCheck"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["noLinkCheck"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="noLinkCheck", + val=r.get("noLinkCheck"), + max_len=max_len, + cols=cols, ) if self.mapSubject is not None: - r["mapSubject"] = save( + saved_val = save( self.mapSubject, top=False, base_url=base_url, relative_uris=relative_uris, + doc=self._doc.get("mapSubject"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["mapSubject"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="mapSubject", + val=r.get("mapSubject"), + max_len=max_len, + cols=cols, ) if self.mapPredicate is not None: - r["mapPredicate"] = save( + saved_val = save( self.mapPredicate, top=False, base_url=base_url, relative_uris=relative_uris, + doc=self._doc.get("mapPredicate"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["mapPredicate"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="mapPredicate", + val=r.get("mapPredicate"), + max_len=max_len, + cols=cols, ) if self.refScope is not None: - r["refScope"] = save( - self.refScope, top=False, base_url=base_url, relative_uris=relative_uris + saved_val = save( + self.refScope, + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=self._doc.get("refScope"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = 
saved_val[0] + r["refScope"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="refScope", + val=r.get("refScope"), + max_len=max_len, + cols=cols, ) if self.typeDSL is not None: - r["typeDSL"] = save( - self.typeDSL, top=False, base_url=base_url, relative_uris=relative_uris + saved_val = save( + self.typeDSL, + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=self._doc.get("typeDSL"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["typeDSL"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="typeDSL", + val=r.get("typeDSL"), + max_len=max_len, + cols=cols, ) if self.secondaryFilesDSL is not None: - r["secondaryFilesDSL"] = save( + saved_val = save( self.secondaryFilesDSL, top=False, base_url=base_url, relative_uris=relative_uris, + doc=self._doc.get("secondaryFilesDSL"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["secondaryFilesDSL"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFilesDSL", + val=r.get("secondaryFilesDSL"), + max_len=max_len, + cols=cols, ) if self.subscope is not None: - r["subscope"] = save( - self.subscope, top=False, base_url=base_url, relative_uris=relative_uris + saved_val = save( + self.subscope, + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=self._doc.get("subscope"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["subscope"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="subscope", 
+ val=r.get("subscope"), + max_len=max_len, + cols=cols, ) # top refers to the directory level @@ -1899,6 +2426,7 @@ def save( "typeDSL", "secondaryFilesDSL", "subscope", + "_doc", ] ) @@ -1908,6 +2436,7 @@ def __init__( self, specializeFrom: Any, specializeTo: Any, + _doc: Any, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: @@ -1922,17 +2451,19 @@ def __init__( self.loadingOptions = LoadingOptions() self.specializeFrom = specializeFrom self.specializeTo = specializeTo + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, SpecializeDef): return bool( self.specializeFrom == other.specializeFrom and self.specializeTo == other.specializeTo + and self._doc == other._doc ) return False def __hash__(self) -> int: - return hash((self.specializeFrom, self.specializeTo)) + return hash((self.specializeFrom, self.specializeTo, self._doc)) @classmethod def fromDoc( @@ -1988,7 +2519,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `specializeFrom`, `specializeTo`".format( + "invalid field `{}`, expected one of: `specializeFrom`, `specializeTo`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -2001,16 +2532,23 @@ def fromDoc( _constructed = cls( specializeFrom=specializeFrom, specializeTo=specializeTo, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, 
int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -2022,9 +2560,27 @@ def save( self.specializeFrom, base_url, False, 1, relative_uris ) r["specializeFrom"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="specializeFrom", + val=r.get("specializeFrom"), + max_len=max_len, + cols=cols, + ) if self.specializeTo is not None: u = save_relative_uri(self.specializeTo, base_url, False, 1, relative_uris) r["specializeTo"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="specializeTo", + val=r.get("specializeTo"), + max_len=max_len, + cols=cols, + ) # top refers to the directory level if top: @@ -2034,7 +2590,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["specializeFrom", "specializeTo"]) + attrs = frozenset(["specializeFrom", "specializeTo", "_doc"]) class NamedType(Saveable): @@ -2063,6 +2619,7 @@ def __init__( self, name: Any, type: Any, + _doc: Any, doc: Optional[Any] = None, jsonldPredicate: Optional[Any] = None, default: Optional[Any] = None, @@ -2083,6 +2640,7 @@ def __init__( self.type = type self.jsonldPredicate = jsonldPredicate self.default = default + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, SaladRecordField): @@ -2092,12 +2650,20 @@ def __eq__(self, other: Any) -> bool: and self.type == other.type and self.jsonldPredicate == other.jsonldPredicate and self.default == other.default + and self._doc == other._doc ) return False def __hash__(self) -> int: return hash( - (self.doc, self.name, self.type, self.jsonldPredicate, self.default) + ( + self.doc, + self.name, + self.type, + self.jsonldPredicate, + self.default, + self._doc, + ) ) @classmethod @@ -2220,7 +2786,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `jsonldPredicate`, 
`default`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `jsonldPredicate`, `default`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -2236,6 +2802,7 @@ def fromDoc( type=type, jsonldPredicate=jsonldPredicate, default=default, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2243,10 +2810,16 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -2256,24 +2829,114 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + max_len=max_len, + cols=cols, + ) if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("doc"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["doc"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + max_len=max_len, + cols=cols, ) if self.type is not None: - r["type"] 
= save( - self.type, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + max_len=max_len, + cols=cols, ) if self.jsonldPredicate is not None: - r["jsonldPredicate"] = save( + saved_val = save( self.jsonldPredicate, top=False, base_url=self.name, relative_uris=relative_uris, + doc=self._doc.get("jsonldPredicate"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["jsonldPredicate"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="jsonldPredicate", + val=r.get("jsonldPredicate"), + max_len=max_len, + cols=cols, ) if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.default, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("default"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["default"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + max_len=max_len, + cols=cols, ) # top refers to the directory level @@ -2284,7 +2947,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type", "jsonldPredicate", "default"]) + attrs = 
frozenset(["doc", "name", "type", "jsonldPredicate", "default", "_doc"]) class SaladRecordSchema(NamedType, RecordSchema, SchemaDefinedType): @@ -2292,6 +2955,7 @@ def __init__( self, name: Any, type: Any, + _doc: Any, inVocab: Optional[Any] = None, fields: Optional[Any] = None, doc: Optional[Any] = None, @@ -2328,6 +2992,7 @@ def __init__( self.abstract = abstract self.extends = extends self.specialize = specialize + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, SaladRecordSchema): @@ -2345,6 +3010,7 @@ def __eq__(self, other: Any) -> bool: and self.abstract == other.abstract and self.extends == other.extends and self.specialize == other.specialize + and self._doc == other._doc ) return False @@ -2364,6 +3030,7 @@ def __hash__(self) -> int: self.abstract, self.extends, self.specialize, + self._doc, ) ) @@ -2631,7 +3298,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `inVocab`, `fields`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `abstract`, `extends`, `specialize`".format( + "invalid field `{}`, expected one of: `name`, `inVocab`, `fields`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `abstract`, `extends`, `specialize`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -2655,6 +3322,7 @@ def fromDoc( abstract=abstract, extends=extends, specialize=specialize, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2662,10 +3330,16 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + 
r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -2675,61 +3349,262 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + max_len=max_len, + cols=cols, + ) if self.inVocab is not None: - r["inVocab"] = save( - self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.inVocab, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("inVocab"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["inVocab"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="inVocab", + val=r.get("inVocab"), + max_len=max_len, + cols=cols, ) if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.fields, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("fields"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["fields"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + max_len=max_len, + cols=cols, ) if self.type is not None: - r["type"] = save( - self.type, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.type, + top=False, + 
base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + max_len=max_len, + cols=cols, ) if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("doc"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["doc"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + max_len=max_len, + cols=cols, ) if self.docParent is not None: u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docParent", + val=r.get("docParent"), + max_len=max_len, + cols=cols, + ) if self.docChild is not None: u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docChild", + val=r.get("docChild"), + max_len=max_len, + cols=cols, + ) if self.docAfter is not None: u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docAfter", + val=r.get("docAfter"), + max_len=max_len, + cols=cols, + ) if self.jsonldPredicate is not None: - r["jsonldPredicate"] = save( + saved_val = save( self.jsonldPredicate, 
top=False, base_url=self.name, relative_uris=relative_uris, + doc=self._doc.get("jsonldPredicate"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["jsonldPredicate"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="jsonldPredicate", + val=r.get("jsonldPredicate"), + max_len=max_len, + cols=cols, ) if self.documentRoot is not None: - r["documentRoot"] = save( + saved_val = save( self.documentRoot, top=False, base_url=self.name, relative_uris=relative_uris, + doc=self._doc.get("documentRoot"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["documentRoot"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="documentRoot", + val=r.get("documentRoot"), + max_len=max_len, + cols=cols, ) if self.abstract is not None: - r["abstract"] = save( + saved_val = save( self.abstract, top=False, base_url=self.name, relative_uris=relative_uris, + doc=self._doc.get("abstract"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["abstract"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="abstract", + val=r.get("abstract"), + max_len=max_len, + cols=cols, ) if self.extends is not None: u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) r["extends"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="extends", + val=r.get("extends"), + max_len=max_len, + cols=cols, + ) if self.specialize is not None: - r["specialize"] = save( + saved_val = save( self.specialize, top=False, 
base_url=self.name, relative_uris=relative_uris, + doc=self._doc.get("specialize"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["specialize"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="specialize", + val=r.get("specialize"), + max_len=max_len, + cols=cols, ) # top refers to the directory level @@ -2755,6 +3630,7 @@ def save( "abstract", "extends", "specialize", + "_doc", ] ) @@ -2769,6 +3645,7 @@ def __init__( self, symbols: Any, type: Any, + _doc: Any, name: Optional[Any] = None, inVocab: Optional[Any] = None, doc: Optional[Any] = None, @@ -2801,6 +3678,7 @@ def __init__( self.jsonldPredicate = jsonldPredicate self.documentRoot = documentRoot self.extends = extends + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, SaladEnumSchema): @@ -2816,6 +3694,7 @@ def __eq__(self, other: Any) -> bool: and self.jsonldPredicate == other.jsonldPredicate and self.documentRoot == other.documentRoot and self.extends == other.extends + and self._doc == other._doc ) return False @@ -2833,6 +3712,7 @@ def __hash__(self) -> int: self.jsonldPredicate, self.documentRoot, self.extends, + self._doc, ) ) @@ -3061,7 +3941,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `inVocab`, `symbols`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `extends`".format( + "invalid field `{}`, expected one of: `name`, `inVocab`, `symbols`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `extends`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -3083,6 +3963,7 @@ def fromDoc( jsonldPredicate=jsonldPredicate, documentRoot=documentRoot, extends=extends, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -3090,10 
+3971,16 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -3103,47 +3990,200 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + max_len=max_len, + cols=cols, + ) if self.inVocab is not None: - r["inVocab"] = save( - self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.inVocab, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("inVocab"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["inVocab"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="inVocab", + val=r.get("inVocab"), + max_len=max_len, + cols=cols, ) if self.symbols is not None: u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) r["symbols"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + max_len=max_len, + cols=cols, + ) if self.type is not None: - r["type"] = save( - self.type, top=False, 
base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + max_len=max_len, + cols=cols, ) if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("doc"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["doc"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + max_len=max_len, + cols=cols, ) if self.docParent is not None: u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docParent", + val=r.get("docParent"), + max_len=max_len, + cols=cols, + ) if self.docChild is not None: u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docChild", + val=r.get("docChild"), + max_len=max_len, + cols=cols, + ) if self.docAfter is not None: u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docAfter", + val=r.get("docAfter"), + max_len=max_len, + cols=cols, + ) if 
self.jsonldPredicate is not None: - r["jsonldPredicate"] = save( + saved_val = save( self.jsonldPredicate, top=False, base_url=self.name, relative_uris=relative_uris, + doc=self._doc.get("jsonldPredicate"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["jsonldPredicate"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="jsonldPredicate", + val=r.get("jsonldPredicate"), + max_len=max_len, + cols=cols, ) if self.documentRoot is not None: - r["documentRoot"] = save( + saved_val = save( self.documentRoot, top=False, base_url=self.name, relative_uris=relative_uris, + doc=self._doc.get("documentRoot"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["documentRoot"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="documentRoot", + val=r.get("documentRoot"), + max_len=max_len, + cols=cols, ) if self.extends is not None: u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) r["extends"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="extends", + val=r.get("extends"), + max_len=max_len, + cols=cols, + ) # top refers to the directory level if top: @@ -3166,6 +4206,7 @@ def save( "jsonldPredicate", "documentRoot", "extends", + "_doc", ] ) @@ -3181,6 +4222,7 @@ def __init__( self, name: Any, type: Any, + _doc: Any, inVocab: Optional[Any] = None, doc: Optional[Any] = None, docParent: Optional[Any] = None, @@ -3205,6 +4247,7 @@ def __init__( self.docChild = docChild self.docAfter = docAfter self.type = type + self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, Documentation): @@ -3216,6 +4259,7 @@ def __eq__(self, other: Any) -> 
bool: and self.docChild == other.docChild and self.docAfter == other.docAfter and self.type == other.type + and self._doc == other._doc ) return False @@ -3229,6 +4273,7 @@ def __hash__(self) -> int: self.docChild, self.docAfter, self.type, + self._doc, ) ) @@ -3388,7 +4433,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `inVocab`, `doc`, `docParent`, `docChild`, `docAfter`, `type`".format( + "invalid field `{}`, expected one of: `name`, `inVocab`, `doc`, `docParent`, `docChild`, `docAfter`, `type`, `_doc`".format( k ), SourceLine(_doc, k, str), @@ -3406,6 +4451,7 @@ def fromDoc( docChild=docChild, docAfter=docAfter, type=type, + _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -3413,10 +4459,16 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} - + self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + ) -> CommentedMap: + r = CommentedMap() + if line_info is not None: + self._doc = line_info + if (type(self._doc) == CommentedMap): + r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + line_numbers = get_line_numbers(self._doc) + max_len = get_max_line_num(self._doc) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -3426,26 +4478,125 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + max_len=max_len, + cols=cols, + ) if self.inVocab is not None: - r["inVocab"] = save( - self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.inVocab, + top=False, + 
base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("inVocab"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["inVocab"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="inVocab", + val=r.get("inVocab"), + max_len=max_len, + cols=cols, ) if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + saved_val = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("doc"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["doc"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + max_len=max_len, + cols=cols, ) if self.docParent is not None: u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docParent", + val=r.get("docParent"), + max_len=max_len, + cols=cols, + ) if self.docChild is not None: u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docChild", + val=r.get("docChild"), + max_len=max_len, + cols=cols, + ) if self.docAfter is not None: u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="docAfter", + val=r.get("docAfter"), + max_len=max_len, + cols=cols, + ) if self.type is not None: - r["type"] = save( - self.type, top=False, base_url=self.name, 
relative_uris=relative_uris + saved_val = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris, + doc=self._doc.get("type"), + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r["type"] = saved_val + + max_len = add_kv( + old_doc=self._doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + max_len=max_len, + cols=cols, ) # top refers to the directory level @@ -3457,7 +4608,7 @@ def save( return r attrs = frozenset( - ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"] + ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type", "_doc"] ) From 149b1ba4e09aec05c74a984b9171116d6324f2a2 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Tue, 17 Jan 2023 10:49:46 -0700 Subject: [PATCH 15/44] updating type -> asinstance and bug fix in save --- schema_salad/python_codegen.py | 2 +- schema_salad/python_codegen_support.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index bcb71a3d4..f747a2108 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -281,7 +281,7 @@ def save( r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): + if isinstance(self._doc, CommentedMap): r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 694f60134..7f3bc918a 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -252,7 +252,7 @@ def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: only save value info if value is hashable. 
""" line_numbers: Dict[Any, dict[str,int]] = {} - if type(doc) == dict: + if isinstance(doc, dict): return {} for key, value in doc.lc.data.items(): line_numbers[key] = {} @@ -274,7 +274,7 @@ def get_max_line_num(doc: CommentedMap) -> int: max_line = 0 max_key = "" cur = doc - while type(cur) == CommentedMap and len(cur) > 0: + while isinstance(cur, CommentedMap) and len(cur) > 0: for key in cur.lc.data.keys(): if cur.lc.data[key][2] >= max_line: max_line = cur.lc.data[key][2] @@ -303,6 +303,7 @@ def save( if doc: if i in doc.lc.data: r.lc.data[i] = doc.lc.data[i] + if isinstance(doc, CommentedSeq): r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc[i])) else: r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) From 16996bb0789f36f038c1bf6e7cd2c08309259c6c Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Tue, 17 Jan 2023 10:53:29 -0700 Subject: [PATCH 16/44] Adding doc = copy.copy(doc) before removing values --- schema_salad/python_codegen_support.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 7f3bc918a..13d4d85f5 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -813,7 +813,7 @@ def _document_load( # for k, v in doc.items() # if k not in ("$namespaces", "$schemas", "$base") # } - + doc = copy.copy(doc) if "$namespaces" in doc: doc.pop("$namespaces") if "$schemas" in doc: From 0522ce83f0485eb3522c342aea753a7c975afb88 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Tue, 17 Jan 2023 10:54:51 -0700 Subject: [PATCH 17/44] removing typecheck for key in val --- schema_salad/python_codegen_support.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 13d4d85f5..558b75b4e 100644 --- a/schema_salad/python_codegen_support.py 
+++ b/schema_salad/python_codegen_support.py @@ -316,17 +316,11 @@ def save( newdict = CommentedMap() for key in val: if doc: - if isinstance(key, (int, float, bool, str)): - if key in doc: - newdict.lc.add_kv_line_col(key, doc.lc.data[key]) - newdict[key] = save( - val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc.get(key) - ) - else: - newdict[key] = save( - val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc - ) - + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc.get(key) + ) else: newdict[key] = save( val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc From 6f62fb8c3fd118be1acbc1fecee4d2b140d4fb33 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Wed, 18 Jan 2023 11:12:33 -0700 Subject: [PATCH 18/44] running make cleanup --- schema_salad/python_codegen_support.py | 59 ++++++++++++++++++++----- schema_salad/tests/test_line_numbers.py | 20 +++++---- 2 files changed, 61 insertions(+), 18 deletions(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 558b75b4e..ef1d231ff 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -189,7 +189,11 @@ def fromDoc( @abstractmethod def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -219,7 +223,16 @@ def load_field(val, fieldtype, baseuri, loadingOptions): Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] -def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict[Any,dict[str,int]], key: str, val: Any, max_len: int, 
cols: dict[int,int])->int: + +def add_kv( + old_doc: CommentedMap, + new_doc: CommentedMap, + line_numbers: dict[Any, dict[str, int]], + key: str, + val: Any, + max_len: int, + cols: dict[int, int], +) -> int: """Add key value pair into Commented Map. Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap, @@ -251,7 +264,7 @@ def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: For each key/value pair in a CommentedMap, save the line/col info into a dictionary, only save value info if value is hashable. """ - line_numbers: Dict[Any, dict[str,int]] = {} + line_numbers: Dict[Any, dict[str, int]] = {} if isinstance(doc, dict): return {} for key, value in doc.lc.data.items(): @@ -295,7 +308,9 @@ def save( Recursively calls save method from class if val is of type Saveable. Otherwise, saves val to CommentedMap or CommentedSeq """ if isinstance(val, Saveable): - return val.save(top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc) + return val.save( + top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc + ) if isinstance(val, MutableSequence): r = CommentedSeq() r.lc.data = {} @@ -304,9 +319,25 @@ def save( if i in doc.lc.data: r.lc.data[i] = doc.lc.data[i] if isinstance(doc, CommentedSeq): - r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc[i])) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc[i], + ) + ) else: - r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc, + ) + ) return r # return [ # save(v, top=False, base_url=base_url, relative_uris=relative_uris) @@ -319,11 +350,19 @@ def save( if key in doc: newdict.lc.add_kv_line_col(key, doc.lc.data[key]) newdict[key] = save( - val[key], top=False, 
base_url=base_url, relative_uris=relative_uris, doc=doc.get(key) - ) - else: + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc.get(key), + ) + else: newdict[key] = save( - val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc, ) return newdict # newdict = {} diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 8a2a59650..415e91e4f 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -1,14 +1,16 @@ -#from parser import load_document_by_uri, save +# from parser import load_document_by_uri, save from pathlib import Path -from schema_salad.utils import yaml_no_ts -from ruamel.yaml.comments import CommentedMap, CommentedSeq from typing import Any, Dict, List, Optional, cast + +from ruamel.yaml.comments import CommentedMap, CommentedSeq + from schema_salad import codegen from schema_salad.avro.schema import Names from schema_salad.schema import load_schema +from schema_salad.utils import yaml_no_ts -def compare_line_numbers(original_doc:CommentedMap, codegen_doc:CommentedMap)->None: +def compare_line_numbers(original_doc: CommentedMap, codegen_doc: CommentedMap) -> None: assert type(original_doc) == CommentedMap assert type(codegen_doc) == CommentedMap @@ -17,7 +19,7 @@ def compare_line_numbers(original_doc:CommentedMap, codegen_doc:CommentedMap)->N for key, lc_info in original_doc.lc.data.items(): assert key in codegen_doc.lc.data - assert lc_info==codegen_doc.lc.data[key] + assert lc_info == codegen_doc.lc.data[key] max_line = get_max_line_number(original_doc) @@ -27,7 +29,8 @@ def compare_line_numbers(original_doc:CommentedMap, codegen_doc:CommentedMap)->N assert lc_info == [max_line, 0, max_line, len(key) + 2] max_line += 1 -def get_max_line_number(original_doc:CommentedMap)->int: + +def get_max_line_number(original_doc: 
CommentedMap) -> int: max_key = "" max_line = 0 temp_doc = original_doc @@ -39,6 +42,7 @@ def get_max_line_number(original_doc:CommentedMap)->int: temp_doc = temp_doc[max_key] return max_line + 1 + def python_codegen( file_uri: str, target: Path, @@ -60,5 +64,5 @@ def python_codegen( document_loader, target=str(target), parser_info=parser_info, - package=package - ) \ No newline at end of file + package=package, + ) From ba2dd90cf22ae348318e3009c2d2f5e4fd7d1f62 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Wed, 18 Jan 2023 11:21:26 -0700 Subject: [PATCH 19/44] updating metaschema.py --- schema_salad/metaschema.py | 175 ++++++++++++++++++++++++++----------- 1 file changed, 124 insertions(+), 51 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 1f9e8e290..18ce82b9f 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -192,7 +192,11 @@ def fromDoc( @abstractmethod def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -222,7 +226,16 @@ def load_field(val, fieldtype, baseuri, loadingOptions): Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] -def add_kv(old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: dict[Any,dict[str,int]], key: str, val: Any, max_len: int, cols: dict[int,int])->int: + +def add_kv( + old_doc: CommentedMap, + new_doc: CommentedMap, + line_numbers: dict[Any, dict[str, int]], + key: str, + val: Any, + max_len: int, + cols: dict[int, int], +) -> int: """Add key value pair into Commented Map. 
Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap, @@ -254,7 +267,7 @@ def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: For each key/value pair in a CommentedMap, save the line/col info into a dictionary, only save value info if value is hashable. """ - line_numbers: Dict[Any, dict[str,int]] = {} + line_numbers: Dict[Any, dict[str, int]] = {} if type(doc) == dict: return {} for key, value in doc.lc.data.items(): @@ -298,7 +311,9 @@ def save( Recursively calls save method from class if val is of type Saveable. Otherwise, saves val to CommentedMap or CommentedSeq """ if isinstance(val, Saveable): - return val.save(top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc) + return val.save( + top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc + ) if isinstance(val, MutableSequence): r = CommentedSeq() r.lc.data = {} @@ -306,9 +321,25 @@ def save( if doc: if i in doc.lc.data: r.lc.data[i] = doc.lc.data[i] - r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc[i])) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc[i], + ) + ) else: - r.append(save(val[i], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc)) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc, + ) + ) return r # return [ # save(v, top=False, base_url=base_url, relative_uris=relative_uris) @@ -322,16 +353,28 @@ def save( if key in doc: newdict.lc.add_kv_line_col(key, doc.lc.data[key]) newdict[key] = save( - val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc.get(key) + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc.get(key), ) else: newdict[key] = save( - val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc + val[key], + 
top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc, ) - - else: + + else: newdict[key] = save( - val[key], top=False, base_url=base_url, relative_uris=relative_uris, doc=doc + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + doc=doc, ) return newdict # newdict = {} @@ -986,7 +1029,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -1119,13 +1161,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ -1218,7 +1264,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -1320,13 +1365,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + 
r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ -1413,7 +1462,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -1543,13 +1591,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ -1629,7 +1681,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -1728,13 +1779,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ 
-1817,7 +1872,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -2126,13 +2180,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ -2440,7 +2498,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -2539,13 +2596,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ -2626,7 +2687,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: 
self.extension_fields = extension_fields else: @@ -2810,13 +2870,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ -2970,7 +3034,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -3330,13 +3393,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ -3658,7 +3725,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -3971,13 +4037,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", 
relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} @@ -4231,7 +4301,6 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: - if extension_fields: self.extension_fields = extension_fields else: @@ -4459,13 +4528,17 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + line_info: Optional[CommentedMap] = None, ) -> CommentedMap: r = CommentedMap() if line_info is not None: self._doc = line_info - if (type(self._doc) == CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) + if type(self._doc) == CommentedMap: + r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) line_numbers = get_line_numbers(self._doc) max_len = get_max_line_num(self._doc) cols: Dict[int, int] = {} From 2dea597ff7ad7aa7194a3c6064d1dd7aa12c3305 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 23 Jan 2023 09:37:50 -0700 Subject: [PATCH 20/44] Fixing type warning for doc --- schema_salad/metaschema.py | 663 ++++++++++++++----------- schema_salad/python_codegen.py | 37 +- schema_salad/python_codegen_support.py | 88 ++-- 3 files changed, 431 insertions(+), 357 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 18ce82b9f..90a7512b0 100644 --- 
a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -46,6 +46,9 @@ IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] +doc_line_info = CommentedMap() + + class LoadingOptions: idx: IdxType fileuri: Optional[str] @@ -196,7 +199,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -268,7 +271,7 @@ def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: only save value info if value is hashable. """ line_numbers: Dict[Any, dict[str, int]] = {} - if type(doc) == dict: + if isinstance(doc, dict) or doc is None: return {} for key, value in doc.lc.data.items(): line_numbers[key] = {} @@ -290,7 +293,7 @@ def get_max_line_num(doc: CommentedMap) -> int: max_line = 0 max_key = "" cur = doc - while type(cur) == CommentedMap and len(cur) > 0: + while isinstance(cur, CommentedMap) and len(cur) > 0: for key in cur.lc.data.keys(): if cur.lc.data[key][2] >= max_line: max_line = cur.lc.data[key][2] @@ -304,42 +307,49 @@ def save( top: bool = True, base_url: str = "", relative_uris: bool = True, - doc: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> save_type: """Save a val of any type. Recursively calls save method from class if val is of type Saveable. 
Otherwise, saves val to CommentedMap or CommentedSeq """ + if keys is None: + keys = [] + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys ) if isinstance(val, MutableSequence): r = CommentedSeq() r.lc.data = {} for i in range(0, len(val)): + new_keys = keys if doc: - if i in doc.lc.data: + if i in doc: r.lc.data[i] = doc.lc.data[i] - r.append( - save( - val[i], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc[i], - ) - ) - else: - r.append( - save( - val[i], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc, - ) + new_keys.append(i) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, ) + ) return r # return [ # save(v, top=False, base_url=base_url, relative_uris=relative_uris) @@ -347,35 +357,20 @@ def save( # ] if isinstance(val, MutableMapping): newdict = CommentedMap() + new_keys = keys for key in val: if doc: - if isinstance(key, (int, float, bool, str)): - if key in doc: - newdict.lc.add_kv_line_col(key, doc.lc.data[key]) - newdict[key] = save( - val[key], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc.get(key), - ) - else: - newdict[key] = save( - val[key], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc, - ) + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + new_keys.append(key) - else: - newdict[key] = save( - val[key], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc, - ) + newdict[key] = save( + val[key], + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + keys=new_keys, + ) return newdict # newdict = {} # for key in val: @@ -821,7 +816,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): def _document_load( loader: _Loader, - doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]], + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], baseuri: str, loadingOptions: LoadingOptions, addl_metadata_fields: Optional[MutableSequence[str]] = None, @@ -858,7 +853,7 @@ def _document_load( # for k, v in doc.items() # if k not in ("$namespaces", "$schemas", "$base") # } - + doc = copy.copy(doc) if "$namespaces" in doc: doc.pop("$namespaces") if "$schemas" in doc: @@ -881,7 +876,9 @@ def _document_load( loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] return loadingOptions.idx[baseuri] - + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc if isinstance(doc, MutableSequence): loadingOptions.idx[baseuri] = ( loader.load(doc, baseuri, loadingOptions), @@ -1024,7 +1021,6 @@ def __init__( self, name: Any, type: Any, - _doc: Any, doc: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, @@ -1040,7 +1036,6 @@ def __init__( self.doc = doc self.name = name self.type = type - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): @@ -1048,12 +1043,11 @@ def __eq__(self, other: Any) -> bool: self.doc == other.doc and self.name == other.name and self.type == other.type - and self._doc == other._doc ) return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type, self._doc)) + return hash((self.doc, self.name, self.type)) @classmethod def fromDoc( @@ -1139,7 +1133,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `_doc`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( k ), SourceLine(_doc, k, str), 
@@ -1153,7 +1147,6 @@ def fromDoc( doc=doc, name=name, type=type, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -1165,15 +1158,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -1185,7 +1190,7 @@ def save( u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name", @@ -1199,7 +1204,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("doc"), + keys=keys + ["doc"], ) if type(saved_val) == list: @@ -1210,7 +1215,7 @@ def save( r["doc"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", @@ -1224,7 +1229,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("type"), + keys=keys + ["type"], ) if type(saved_val) == list: @@ -1235,7 +1240,7 @@ def save( r["type"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", @@ 
-1252,14 +1257,13 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type", "_doc"]) + attrs = frozenset(["doc", "name", "type"]) class RecordSchema(Saveable): def __init__( self, type: Any, - _doc: Any, fields: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, @@ -1274,19 +1278,14 @@ def __init__( self.loadingOptions = LoadingOptions() self.fields = fields self.type = type - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, RecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self._doc == other._doc - ) + return bool(self.fields == other.fields and self.type == other.type) return False def __hash__(self) -> int: - return hash((self.fields, self.type, self._doc)) + return hash((self.fields, self.type)) @classmethod def fromDoc( @@ -1345,7 +1344,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `_doc`".format( + "invalid field `{}`, expected one of: `fields`, `type`".format( k ), SourceLine(_doc, k, str), @@ -1358,7 +1357,6 @@ def fromDoc( _constructed = cls( fields=fields, type=type, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -1369,15 +1367,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + 
if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -1391,7 +1401,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("fields"), + keys=keys + ["fields"], ) if type(saved_val) == list: @@ -1402,7 +1412,7 @@ def save( r["fields"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="fields", @@ -1416,7 +1426,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("type"), + keys=keys + ["type"], ) if type(saved_val) == list: @@ -1427,7 +1437,7 @@ def save( r["type"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", @@ -1444,7 +1454,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "_doc"]) + attrs = frozenset(["fields", "type"]) class EnumSchema(Saveable): @@ -1457,7 +1467,6 @@ def __init__( self, symbols: Any, type: Any, - _doc: Any, name: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, @@ -1473,7 +1482,6 @@ def __init__( self.name = name self.symbols = symbols self.type = type - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, EnumSchema): @@ -1481,12 +1489,11 @@ def __eq__(self, other: Any) -> bool: self.name == other.name and self.symbols == other.symbols and self.type == other.type - and self._doc == other._doc ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type, self._doc)) + return hash((self.name, self.symbols, self.type)) @classmethod def fromDoc( @@ -1569,7 +1576,7 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `_doc`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( k ), SourceLine(_doc, k, str), @@ -1583,7 +1590,6 @@ def fromDoc( name=name, symbols=symbols, type=type, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -1595,15 +1601,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -1615,7 +1633,7 @@ def save( u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name", @@ -1627,7 +1645,7 @@ def save( u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) r["symbols"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="symbols", @@ -1641,7 +1659,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("type"), + keys=keys + ["type"], ) if type(saved_val) == list: @@ -1652,7 +1670,7 @@ def save( r["type"] = 
saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", @@ -1669,7 +1687,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "_doc"]) + attrs = frozenset(["name", "symbols", "type"]) class ArraySchema(Saveable): @@ -1677,7 +1695,6 @@ def __init__( self, items: Any, type: Any, - _doc: Any, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: @@ -1691,19 +1708,14 @@ def __init__( self.loadingOptions = LoadingOptions() self.items = items self.type = type - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, ArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self._doc == other._doc - ) + return bool(self.items == other.items and self.type == other.type) return False def __hash__(self) -> int: - return hash((self.items, self.type, self._doc)) + return hash((self.items, self.type)) @classmethod def fromDoc( @@ -1759,7 +1771,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `_doc`".format( + "invalid field `{}`, expected one of: `items`, `type`".format( k ), SourceLine(_doc, k, str), @@ -1772,7 +1784,6 @@ def fromDoc( _constructed = cls( items=items, type=type, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -1783,15 +1794,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if 
isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -1803,7 +1826,7 @@ def save( u = save_relative_uri(self.items, base_url, False, 2, relative_uris) r["items"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="items", @@ -1817,7 +1840,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("type"), + keys=keys + ["type"], ) if type(saved_val) == list: @@ -1828,7 +1851,7 @@ def save( r["type"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", @@ -1845,7 +1868,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type", "_doc"]) + attrs = frozenset(["items", "type"]) class JsonldPredicate(Saveable): @@ -1857,7 +1880,6 @@ class JsonldPredicate(Saveable): def __init__( self, - _doc: Any, _id: Optional[Any] = None, _type: Optional[Any] = None, _container: Optional[Any] = None, @@ -1891,7 +1913,6 @@ def __init__( self.typeDSL = typeDSL self.secondaryFilesDSL = secondaryFilesDSL self.subscope = subscope - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, JsonldPredicate): @@ -1907,7 +1928,6 @@ def __eq__(self, other: Any) -> bool: and self.typeDSL == other.typeDSL and self.secondaryFilesDSL == other.secondaryFilesDSL and self.subscope == other.subscope - and self._doc == other._doc ) return False @@ -1925,7 +1945,6 @@ def __hash__(self) -> int: self.typeDSL, self.secondaryFilesDSL, self.subscope, - self._doc, ) ) @@ -2151,7 +2170,7 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "invalid field `{}`, expected one of: `_id`, `_type`, `_container`, `identity`, `noLinkCheck`, `mapSubject`, `mapPredicate`, `refScope`, `typeDSL`, `secondaryFilesDSL`, `subscope`, `_doc`".format( + "invalid field `{}`, expected one of: `_id`, `_type`, `_container`, `identity`, `noLinkCheck`, `mapSubject`, `mapPredicate`, `refScope`, `typeDSL`, `secondaryFilesDSL`, `subscope`".format( k ), SourceLine(_doc, k, str), @@ -2173,7 +2192,6 @@ def fromDoc( typeDSL=typeDSL, secondaryFilesDSL=secondaryFilesDSL, subscope=subscope, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2184,15 +2202,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -2204,7 +2234,7 @@ def save( u = save_relative_uri(self._id, base_url, True, None, relative_uris) r["_id"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="_id", @@ -2218,7 +2248,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("_type"), + keys=keys + ["_type"], ) if type(saved_val) == list: @@ -2229,7 
+2259,7 @@ def save( r["_type"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="_type", @@ -2243,7 +2273,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("_container"), + keys=keys + ["_container"], ) if type(saved_val) == list: @@ -2254,7 +2284,7 @@ def save( r["_container"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="_container", @@ -2268,7 +2298,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("identity"), + keys=keys + ["identity"], ) if type(saved_val) == list: @@ -2279,7 +2309,7 @@ def save( r["identity"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="identity", @@ -2293,7 +2323,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("noLinkCheck"), + keys=keys + ["noLinkCheck"], ) if type(saved_val) == list: @@ -2304,7 +2334,7 @@ def save( r["noLinkCheck"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="noLinkCheck", @@ -2318,7 +2348,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("mapSubject"), + keys=keys + ["mapSubject"], ) if type(saved_val) == list: @@ -2329,7 +2359,7 @@ def save( r["mapSubject"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="mapSubject", @@ -2343,7 +2373,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("mapPredicate"), + keys=keys + ["mapPredicate"], ) if type(saved_val) == list: @@ -2354,7 +2384,7 @@ def save( r["mapPredicate"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="mapPredicate", @@ -2368,7 +2398,7 @@ def save( top=False, base_url=base_url, 
relative_uris=relative_uris, - doc=self._doc.get("refScope"), + keys=keys + ["refScope"], ) if type(saved_val) == list: @@ -2379,7 +2409,7 @@ def save( r["refScope"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="refScope", @@ -2393,7 +2423,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("typeDSL"), + keys=keys + ["typeDSL"], ) if type(saved_val) == list: @@ -2404,7 +2434,7 @@ def save( r["typeDSL"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="typeDSL", @@ -2418,7 +2448,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("secondaryFilesDSL"), + keys=keys + ["secondaryFilesDSL"], ) if type(saved_val) == list: @@ -2429,7 +2459,7 @@ def save( r["secondaryFilesDSL"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="secondaryFilesDSL", @@ -2443,7 +2473,7 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, - doc=self._doc.get("subscope"), + keys=keys + ["subscope"], ) if type(saved_val) == list: @@ -2454,7 +2484,7 @@ def save( r["subscope"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="subscope", @@ -2484,7 +2514,6 @@ def save( "typeDSL", "secondaryFilesDSL", "subscope", - "_doc", ] ) @@ -2494,7 +2523,6 @@ def __init__( self, specializeFrom: Any, specializeTo: Any, - _doc: Any, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: @@ -2508,19 +2536,17 @@ def __init__( self.loadingOptions = LoadingOptions() self.specializeFrom = specializeFrom self.specializeTo = specializeTo - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, SpecializeDef): return bool( self.specializeFrom == other.specializeFrom and self.specializeTo == other.specializeTo 
- and self._doc == other._doc ) return False def __hash__(self) -> int: - return hash((self.specializeFrom, self.specializeTo, self._doc)) + return hash((self.specializeFrom, self.specializeTo)) @classmethod def fromDoc( @@ -2576,7 +2602,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `specializeFrom`, `specializeTo`, `_doc`".format( + "invalid field `{}`, expected one of: `specializeFrom`, `specializeTo`".format( k ), SourceLine(_doc, k, str), @@ -2589,7 +2615,6 @@ def fromDoc( _constructed = cls( specializeFrom=specializeFrom, specializeTo=specializeTo, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2600,15 +2625,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -2622,7 +2659,7 @@ def save( ) r["specializeFrom"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="specializeFrom", @@ -2634,7 +2671,7 @@ def save( u = save_relative_uri(self.specializeTo, base_url, False, 1, relative_uris) r["specializeTo"] = u max_len = add_kv( - 
old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="specializeTo", @@ -2651,7 +2688,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["specializeFrom", "specializeTo", "_doc"]) + attrs = frozenset(["specializeFrom", "specializeTo"]) class NamedType(Saveable): @@ -2680,7 +2717,6 @@ def __init__( self, name: Any, type: Any, - _doc: Any, doc: Optional[Any] = None, jsonldPredicate: Optional[Any] = None, default: Optional[Any] = None, @@ -2700,7 +2736,6 @@ def __init__( self.type = type self.jsonldPredicate = jsonldPredicate self.default = default - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, SaladRecordField): @@ -2710,20 +2745,12 @@ def __eq__(self, other: Any) -> bool: and self.type == other.type and self.jsonldPredicate == other.jsonldPredicate and self.default == other.default - and self._doc == other._doc ) return False def __hash__(self) -> int: return hash( - ( - self.doc, - self.name, - self.type, - self.jsonldPredicate, - self.default, - self._doc, - ) + (self.doc, self.name, self.type, self.jsonldPredicate, self.default) ) @classmethod @@ -2846,7 +2873,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `jsonldPredicate`, `default`, `_doc`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `jsonldPredicate`, `default`".format( k ), SourceLine(_doc, k, str), @@ -2862,7 +2889,6 @@ def fromDoc( type=type, jsonldPredicate=jsonldPredicate, default=default, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2874,15 +2900,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - 
r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -2894,7 +2932,7 @@ def save( u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name", @@ -2908,7 +2946,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("doc"), + keys=keys + ["doc"], ) if type(saved_val) == list: @@ -2919,7 +2957,7 @@ def save( r["doc"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", @@ -2933,7 +2971,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("type"), + keys=keys + ["type"], ) if type(saved_val) == list: @@ -2944,7 +2982,7 @@ def save( r["type"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", @@ -2958,7 +2996,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("jsonldPredicate"), + keys=keys + ["jsonldPredicate"], ) if type(saved_val) == list: @@ -2969,7 +3007,7 @@ def save( r["jsonldPredicate"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="jsonldPredicate", @@ -2983,7 +3021,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - 
doc=self._doc.get("default"), + keys=keys + ["default"], ) if type(saved_val) == list: @@ -2994,7 +3032,7 @@ def save( r["default"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="default", @@ -3011,7 +3049,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type", "jsonldPredicate", "default", "_doc"]) + attrs = frozenset(["doc", "name", "type", "jsonldPredicate", "default"]) class SaladRecordSchema(NamedType, RecordSchema, SchemaDefinedType): @@ -3019,7 +3057,6 @@ def __init__( self, name: Any, type: Any, - _doc: Any, inVocab: Optional[Any] = None, fields: Optional[Any] = None, doc: Optional[Any] = None, @@ -3055,7 +3092,6 @@ def __init__( self.abstract = abstract self.extends = extends self.specialize = specialize - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, SaladRecordSchema): @@ -3073,7 +3109,6 @@ def __eq__(self, other: Any) -> bool: and self.abstract == other.abstract and self.extends == other.extends and self.specialize == other.specialize - and self._doc == other._doc ) return False @@ -3093,7 +3128,6 @@ def __hash__(self) -> int: self.abstract, self.extends, self.specialize, - self._doc, ) ) @@ -3361,7 +3395,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `inVocab`, `fields`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `abstract`, `extends`, `specialize`, `_doc`".format( + "invalid field `{}`, expected one of: `name`, `inVocab`, `fields`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `abstract`, `extends`, `specialize`".format( k ), SourceLine(_doc, k, str), @@ -3385,7 +3419,6 @@ def fromDoc( abstract=abstract, extends=extends, specialize=specialize, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -3397,15 +3430,27 @@ def save( 
top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -3417,7 +3462,7 @@ def save( u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name", @@ -3431,7 +3476,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("inVocab"), + keys=keys + ["inVocab"], ) if type(saved_val) == list: @@ -3442,7 +3487,7 @@ def save( r["inVocab"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="inVocab", @@ -3456,7 +3501,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("fields"), + keys=keys + ["fields"], ) if type(saved_val) == list: @@ -3467,7 +3512,7 @@ def save( r["fields"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="fields", @@ -3481,7 +3526,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("type"), + keys=keys + ["type"], ) if type(saved_val) 
== list: @@ -3492,7 +3537,7 @@ def save( r["type"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", @@ -3506,7 +3551,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("doc"), + keys=keys + ["doc"], ) if type(saved_val) == list: @@ -3517,7 +3562,7 @@ def save( r["doc"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", @@ -3529,7 +3574,7 @@ def save( u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docParent", @@ -3541,7 +3586,7 @@ def save( u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docChild", @@ -3553,7 +3598,7 @@ def save( u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docAfter", @@ -3567,7 +3612,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("jsonldPredicate"), + keys=keys + ["jsonldPredicate"], ) if type(saved_val) == list: @@ -3578,7 +3623,7 @@ def save( r["jsonldPredicate"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="jsonldPredicate", @@ -3592,7 +3637,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("documentRoot"), + keys=keys + ["documentRoot"], ) if type(saved_val) == list: @@ -3603,7 +3648,7 @@ def save( r["documentRoot"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="documentRoot", @@ -3617,7 +3662,7 @@ def save( top=False, 
base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("abstract"), + keys=keys + ["abstract"], ) if type(saved_val) == list: @@ -3628,7 +3673,7 @@ def save( r["abstract"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="abstract", @@ -3640,7 +3685,7 @@ def save( u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) r["extends"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="extends", @@ -3654,7 +3699,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("specialize"), + keys=keys + ["specialize"], ) if type(saved_val) == list: @@ -3665,7 +3710,7 @@ def save( r["specialize"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="specialize", @@ -3697,7 +3742,6 @@ def save( "abstract", "extends", "specialize", - "_doc", ] ) @@ -3712,7 +3756,6 @@ def __init__( self, symbols: Any, type: Any, - _doc: Any, name: Optional[Any] = None, inVocab: Optional[Any] = None, doc: Optional[Any] = None, @@ -3744,7 +3787,6 @@ def __init__( self.jsonldPredicate = jsonldPredicate self.documentRoot = documentRoot self.extends = extends - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, SaladEnumSchema): @@ -3760,7 +3802,6 @@ def __eq__(self, other: Any) -> bool: and self.jsonldPredicate == other.jsonldPredicate and self.documentRoot == other.documentRoot and self.extends == other.extends - and self._doc == other._doc ) return False @@ -3778,7 +3819,6 @@ def __hash__(self) -> int: self.jsonldPredicate, self.documentRoot, self.extends, - self._doc, ) ) @@ -4007,7 +4047,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `inVocab`, `symbols`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `extends`, `_doc`".format( + "invalid 
field `{}`, expected one of: `name`, `inVocab`, `symbols`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `extends`".format( k ), SourceLine(_doc, k, str), @@ -4029,7 +4069,6 @@ def fromDoc( jsonldPredicate=jsonldPredicate, documentRoot=documentRoot, extends=extends, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -4041,15 +4080,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -4061,7 +4112,7 @@ def save( u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name", @@ -4075,7 +4126,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("inVocab"), + keys=keys + ["inVocab"], ) if type(saved_val) == list: @@ -4086,7 +4137,7 @@ def save( r["inVocab"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="inVocab", @@ -4098,7 +4149,7 @@ def save( u = save_relative_uri(self.symbols, self.name, 
True, None, relative_uris) r["symbols"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="symbols", @@ -4112,7 +4163,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("type"), + keys=keys + ["type"], ) if type(saved_val) == list: @@ -4123,7 +4174,7 @@ def save( r["type"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", @@ -4137,7 +4188,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("doc"), + keys=keys + ["doc"], ) if type(saved_val) == list: @@ -4148,7 +4199,7 @@ def save( r["doc"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", @@ -4160,7 +4211,7 @@ def save( u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docParent", @@ -4172,7 +4223,7 @@ def save( u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docChild", @@ -4184,7 +4235,7 @@ def save( u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docAfter", @@ -4198,7 +4249,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("jsonldPredicate"), + keys=keys + ["jsonldPredicate"], ) if type(saved_val) == list: @@ -4209,7 +4260,7 @@ def save( r["jsonldPredicate"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="jsonldPredicate", @@ -4223,7 +4274,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - 
doc=self._doc.get("documentRoot"), + keys=keys + ["documentRoot"], ) if type(saved_val) == list: @@ -4234,7 +4285,7 @@ def save( r["documentRoot"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="documentRoot", @@ -4246,7 +4297,7 @@ def save( u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) r["extends"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="extends", @@ -4276,7 +4327,6 @@ def save( "jsonldPredicate", "documentRoot", "extends", - "_doc", ] ) @@ -4292,7 +4342,6 @@ def __init__( self, name: Any, type: Any, - _doc: Any, inVocab: Optional[Any] = None, doc: Optional[Any] = None, docParent: Optional[Any] = None, @@ -4316,7 +4365,6 @@ def __init__( self.docChild = docChild self.docAfter = docAfter self.type = type - self._doc = _doc def __eq__(self, other: Any) -> bool: if isinstance(other, Documentation): @@ -4328,7 +4376,6 @@ def __eq__(self, other: Any) -> bool: and self.docChild == other.docChild and self.docAfter == other.docAfter and self.type == other.type - and self._doc == other._doc ) return False @@ -4342,7 +4389,6 @@ def __hash__(self) -> int: self.docChild, self.docAfter, self.type, - self._doc, ) ) @@ -4502,7 +4548,7 @@ def fromDoc( else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `inVocab`, `doc`, `docParent`, `docChild`, `docAfter`, `type`, `_doc`".format( + "invalid field `{}`, expected one of: `name`, `inVocab`, `doc`, `docParent`, `docChild`, `docAfter`, `type`".format( k ), SourceLine(_doc, k, str), @@ -4520,7 +4566,6 @@ def fromDoc( docChild=docChild, docAfter=docAfter, type=type, - _doc=_doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -4532,15 +4577,27 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: + if keys 
is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if type(self._doc) == CommentedMap: - r._yaml_set_line_col(self._doc.lc.line, self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -4552,7 +4609,7 @@ def save( u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name", @@ -4566,7 +4623,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("inVocab"), + keys=keys + ["inVocab"], ) if type(saved_val) == list: @@ -4577,7 +4634,7 @@ def save( r["inVocab"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="inVocab", @@ -4591,7 +4648,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("doc"), + keys=keys + ["doc"], ) if type(saved_val) == list: @@ -4602,7 +4659,7 @@ def save( r["doc"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", @@ -4614,7 +4671,7 @@ def save( u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docParent", @@ -4626,7 +4683,7 @@ def save( u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) 
r["docChild"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docChild", @@ -4638,7 +4695,7 @@ def save( u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="docAfter", @@ -4652,7 +4709,7 @@ def save( top=False, base_url=self.name, relative_uris=relative_uris, - doc=self._doc.get("type"), + keys=keys + ["type"], ) if type(saved_val) == list: @@ -4663,7 +4720,7 @@ def save( r["type"] = saved_val max_len = add_kv( - old_doc=self._doc, + old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", @@ -4681,7 +4738,7 @@ def save( return r attrs = frozenset( - ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type", "_doc"] + ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"] ) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index f747a2108..1cb7c64c9 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -170,8 +170,6 @@ def begin_class( self.out.write(" pass\n\n\n") return - field_names.append("_doc") - required_field_names = [f for f in field_names if f not in optional_fields] optional_field_names = [f for f in field_names if f in optional_fields] @@ -276,15 +274,28 @@ def fromDoc( self.serializer.write( """ def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, line_info: Optional[CommentedMap] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: + if keys is None: + keys = [] r = CommentedMap() - if line_info is not None: - self._doc = line_info - if isinstance(self._doc, CommentedMap): - r._yaml_set_line_col(self._doc.lc.line,self._doc.lc.col) - line_numbers = get_line_numbers(self._doc) - max_len = get_max_line_num(self._doc) + doc = doc_line_info + for key in keys: + + if 
isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -309,7 +320,7 @@ def save( self.serializer.write( """ r["class"] = "{class_}" - max_len = add_kv(old_doc=self._doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), max_len=max_len, cols=cols) + max_len = add_kv(old_doc=doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), max_len=max_len, cols=cols) """.format( class_=classname ) @@ -575,7 +586,7 @@ def declare_field( if self.{safename} is not None: u = save_relative_uri(self.{safename}, {baseurl}, {scoped_id}, {ref_scope}, relative_uris) r["{fieldname}"] = u - max_len = add_kv(old_doc = self._doc, new_doc = r, line_numbers = line_numbers, key = "{key_1}", val = r.get("{key_2}"), max_len = max_len, cols = cols) + max_len = add_kv(old_doc = doc, new_doc = r, line_numbers = line_numbers, key = "{key_1}", val = r.get("{key_2}"), max_len = max_len, cols = cols) """.format( safename=self.safe_name(name), fieldname=shortname(name).strip(), @@ -594,7 +605,7 @@ def declare_field( """ if self.{safename} is not None: saved_val = save( - self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris, doc=self._doc.get("{fieldname}") + self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris, keys = keys + ["{fieldname}"] ) if type(saved_val) == list: @@ -602,7 +613,7 @@ def declare_field( saved_val = saved_val[0] r["{fieldname}"] = saved_val - max_len = add_kv(old_doc = self._doc, new_doc = r, line_numbers = line_numbers, key = "{fieldname}", val = r.get("{fieldname}"), max_len = max_len, cols = cols) + max_len = 
add_kv(old_doc = doc, new_doc = r, line_numbers = line_numbers, key = "{fieldname}", val = r.get("{fieldname}"), max_len = max_len, cols = cols) """.format( safename=self.safe_name(name), fieldname=shortname(name), diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index ef1d231ff..941db96cc 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -43,6 +43,9 @@ IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] +doc_line_info = CommentedMap() + + class LoadingOptions: idx: IdxType fileuri: Optional[str] @@ -193,7 +196,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - line_info: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -265,7 +268,7 @@ def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: only save value info if value is hashable. """ line_numbers: Dict[Any, dict[str, int]] = {} - if isinstance(doc, dict): + if isinstance(doc, dict) or doc is None: return {} for key, value in doc.lc.data.items(): line_numbers[key] = {} @@ -301,43 +304,49 @@ def save( top: bool = True, base_url: str = "", relative_uris: bool = True, - doc: Optional[CommentedMap] = None, + keys: Optional[list[Any]] = None, ) -> save_type: """Save a val of any type. Recursively calls save method from class if val is of type Saveable. 
Otherwise, saves val to CommentedMap or CommentedSeq """ + if keys is None: + keys = [] + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, line_info=doc + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys ) if isinstance(val, MutableSequence): r = CommentedSeq() r.lc.data = {} for i in range(0, len(val)): + new_keys = keys if doc: - if i in doc.lc.data: + if i in doc: r.lc.data[i] = doc.lc.data[i] - if isinstance(doc, CommentedSeq): - r.append( - save( - val[i], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc[i], - ) - ) - else: - r.append( - save( - val[i], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc, - ) + new_keys.append(i) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, ) + ) return r # return [ # save(v, top=False, base_url=base_url, relative_uris=relative_uris) @@ -345,25 +354,20 @@ def save( # ] if isinstance(val, MutableMapping): newdict = CommentedMap() + new_keys = keys for key in val: if doc: if key in doc: newdict.lc.add_kv_line_col(key, doc.lc.data[key]) - newdict[key] = save( - val[key], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc.get(key), - ) - else: - newdict[key] = save( - val[key], - top=False, - base_url=base_url, - relative_uris=relative_uris, - doc=doc, - ) + new_keys.append(key) + + newdict[key] = save( + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + ) return newdict # newdict = {} # for key in val: @@ -809,7 +813,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): def _document_load( loader: _Loader, - doc: 
Union[str, MutableMapping[str, Any], MutableSequence[Any]], + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], baseuri: str, loadingOptions: LoadingOptions, addl_metadata_fields: Optional[MutableSequence[str]] = None, @@ -869,7 +873,9 @@ def _document_load( loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] return loadingOptions.idx[baseuri] - + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc if isinstance(doc, MutableSequence): loadingOptions.idx[baseuri] = ( loader.load(doc, baseuri, loadingOptions), From 74b23d00dbaed1f6177b7a8cc9bb89c2438fd5ff Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 23 Jan 2023 10:48:08 -0700 Subject: [PATCH 21/44] working on test --- schema_salad/tests/test_line_numbers.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 415e91e4f..fa9ec9c96 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -7,7 +7,24 @@ from schema_salad import codegen from schema_salad.avro.schema import Names from schema_salad.schema import load_schema -from schema_salad.utils import yaml_no_ts +from schema_salad.utils import yaml_no_ts, count_lines +import os + + + +def load_yaml(file_name: str) -> CommentedMap: + assert(os.path.isfile(file_name)) + with open(file_name) as f: + yaml = yaml_no_ts() + doc = yaml.load(f.read()) + return doc + +def test_line_number_comparision()->None: + v0_doc = load_yaml(count_lines["v0"]) + v1_doc = load_yaml(count_lines["v1"]) + v2_doc = load_yaml(count_lines["v2"]) + + def compare_line_numbers(original_doc: CommentedMap, codegen_doc: CommentedMap) -> None: From 8575f3fcc611863d51efbe8b753d92842f677099 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 23 Jan 2023 11:06:35 -0700 Subject: [PATCH 22/44] Fixing issue with type hints and indentation of setting global variable --- 
schema_salad/python_codegen_support.py | 11 ++++++----- schema_salad/tests/test_line_numbers.py | 26 ++++++++++++------------- 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 941db96cc..f90a9e88b 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -196,7 +196,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[list[Any]] = None, + keys: Optional[List[Any]] = None, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -304,7 +304,7 @@ def save( top: bool = True, base_url: str = "", relative_uris: bool = True, - keys: Optional[list[Any]] = None, + keys: Optional[List[Any]] = None, ) -> save_type: """Save a val of any type. @@ -858,6 +858,10 @@ def _document_load( if "$base" in doc: doc.pop("$base") + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc + if "$graph" in doc: loadingOptions.idx[baseuri] = ( loader.load(doc["$graph"], baseuri, loadingOptions), @@ -873,9 +877,6 @@ def _document_load( loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] return loadingOptions.idx[baseuri] - if isinstance(doc, CommentedMap): - global doc_line_info - doc_line_info = doc if isinstance(doc, MutableSequence): loadingOptions.idx[baseuri] = ( loader.load(doc, baseuri, loadingOptions), diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index fa9ec9c96..22f5229d7 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -1,4 +1,5 @@ # from parser import load_document_by_uri, save +import os from pathlib import Path from typing import Any, Dict, List, Optional, cast @@ -7,24 +8,21 @@ from schema_salad import codegen from schema_salad.avro.schema import Names from schema_salad.schema import load_schema -from schema_salad.utils import yaml_no_ts, 
count_lines -import os +from schema_salad.utils import yaml_no_ts +# def load_yaml(file_name: str) -> CommentedMap: +# assert os.path.isfile(file_name) +# with open(file_name) as f: +# yaml = yaml_no_ts() +# doc = yaml.load(f.read()) +# return doc -def load_yaml(file_name: str) -> CommentedMap: - assert(os.path.isfile(file_name)) - with open(file_name) as f: - yaml = yaml_no_ts() - doc = yaml.load(f.read()) - return doc -def test_line_number_comparision()->None: - v0_doc = load_yaml(count_lines["v0"]) - v1_doc = load_yaml(count_lines["v1"]) - v2_doc = load_yaml(count_lines["v2"]) - - +# def test_line_number_comparision() -> None: + # v0_doc = load_yaml(count_lines["v0"]) + # v1_doc = load_yaml(count_lines["v1"]) + # v2_doc = load_yaml(count_lines["v2"]) def compare_line_numbers(original_doc: CommentedMap, codegen_doc: CommentedMap) -> None: From a08783348f7e8ae7225aae08ae4fa4463a6769d0 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 23 Jan 2023 11:07:47 -0700 Subject: [PATCH 23/44] adding metaschema.py --- schema_salad/metaschema.py | 91 +++++++++++++++----------------------- 1 file changed, 36 insertions(+), 55 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 90a7512b0..a93ba418d 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -199,7 +199,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[list[Any]] = None, + keys: Optional[List[Any]] = None, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -307,7 +307,7 @@ def save( top: bool = True, base_url: str = "", relative_uris: bool = True, - keys: Optional[list[Any]] = None, + keys: Optional[List[Any]] = None, ) -> save_type: """Save a val of any type. 
@@ -861,6 +861,10 @@ def _document_load( if "$base" in doc: doc.pop("$base") + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc + if "$graph" in doc: loadingOptions.idx[baseuri] = ( loader.load(doc["$graph"], baseuri, loadingOptions), @@ -876,9 +880,6 @@ def _document_load( loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] return loadingOptions.idx[baseuri] - if isinstance(doc, CommentedMap): - global doc_line_info - doc_line_info = doc if isinstance(doc, MutableSequence): loadingOptions.idx[baseuri] = ( loader.load(doc, baseuri, loadingOptions), @@ -1025,6 +1026,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -1154,17 +1156,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -1268,6 +1267,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -1363,17 +1363,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif 
isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -1471,6 +1468,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -1597,17 +1595,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -1698,6 +1693,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -1790,17 +1786,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -1894,6 +1887,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -2198,17 +2192,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: 
bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -2526,6 +2517,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -2621,17 +2613,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -2723,6 +2712,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -2896,17 +2886,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -3071,6 +3058,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -3426,17 +3414,14 @@ def fromDoc( 
return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -3768,6 +3753,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -4076,17 +4062,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -4350,6 +4333,7 @@ def __init__( extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: + if extension_fields: self.extension_fields = extension_fields else: @@ -4573,17 +4557,14 @@ def fromDoc( return _constructed def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[list[Any]] = None, + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = doc_line_info for key in keys: + if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): From 7bfac7c21be0224e499affdcf6bb1b1d0d54ac3c 
Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 23 Jan 2023 12:31:32 -0700 Subject: [PATCH 24/44] fix type error --- schema_salad/metaschema.py | 8 ++++---- schema_salad/python_codegen_support.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index a93ba418d..15219ab14 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -233,11 +233,11 @@ def load_field(val, fieldtype, baseuri, loadingOptions): def add_kv( old_doc: CommentedMap, new_doc: CommentedMap, - line_numbers: dict[Any, dict[str, int]], + line_numbers: Dict[Any, Dict[str, int]], key: str, val: Any, max_len: int, - cols: dict[int, int], + cols: Dict[int, int], ) -> int: """Add key value pair into Commented Map. @@ -264,13 +264,13 @@ def add_kv( return max_len -def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: +def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. For each key/value pair in a CommentedMap, save the line/col info into a dictionary, only save value info if value is hashable. """ - line_numbers: Dict[Any, dict[str, int]] = {} + line_numbers: Dict[Any, Dict[str, int]] = {} if isinstance(doc, dict) or doc is None: return {} for key, value in doc.lc.data.items(): diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index f90a9e88b..4abb089f5 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -230,11 +230,11 @@ def load_field(val, fieldtype, baseuri, loadingOptions): def add_kv( old_doc: CommentedMap, new_doc: CommentedMap, - line_numbers: dict[Any, dict[str, int]], + line_numbers: Dict[Any, Dict[str, int]], key: str, val: Any, max_len: int, - cols: dict[int, int], + cols: Dict[int, int], ) -> int: """Add key value pair into Commented Map. 
@@ -261,13 +261,13 @@ def add_kv( return max_len -def get_line_numbers(doc: CommentedMap) -> dict[Any, dict[str, int]]: +def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. For each key/value pair in a CommentedMap, save the line/col info into a dictionary, only save value info if value is hashable. """ - line_numbers: Dict[Any, dict[str, int]] = {} + line_numbers: Dict[Any, Dict[str, int]] = {} if isinstance(doc, dict) or doc is None: return {} for key, value in doc.lc.data.items(): From 06fc51328de26d7fb13befc19bed79f93b6e15eb Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 23 Jan 2023 12:53:31 -0700 Subject: [PATCH 25/44] fix type error --- schema_salad/metaschema.py | 20 ++++++++++---------- schema_salad/python_codegen.py | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 15219ab14..cac538f80 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -1156,7 +1156,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -1363,7 +1363,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -1595,7 +1595,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys 
is None: keys = [] @@ -1786,7 +1786,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -2192,7 +2192,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -2613,7 +2613,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -2886,7 +2886,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -3414,7 +3414,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -4062,7 +4062,7 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -4557,7 +4557,7 
@@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 1cb7c64c9..f1f62320d 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -274,7 +274,7 @@ def fromDoc( self.serializer.write( """ def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[list[Any]] = None + self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] From 41d406d45f4711a4a5141974e31f43d40065c0af Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Tue, 28 Mar 2023 17:27:22 -0600 Subject: [PATCH 26/44] updates to codegen --- schema_salad/python_codegen.py | 187 +++++++++++++++++++++++-- schema_salad/python_codegen_support.py | 79 +++++++---- 2 files changed, 221 insertions(+), 45 deletions(-) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index f1f62320d..e764c1df5 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -295,7 +295,7 @@ def save( if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -319,12 +319,176 @@ def save( self.serializer.write( """ - r["class"] = "{class_}" - max_len = add_kv(old_doc=doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), max_len=max_len, cols=cols) + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + add_kv( + old_doc=doc, + 
new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + ) + if doc: + if u in doc: + keys.append(u) + if isinstance(doc.get(u), (CommentedMap, CommentedSeq)): + doc = doc.get(u) + line_numbers = get_line_numbers(doc) + min_col = get_min_col(line_numbers) + + for key in set(doc.lc.data.keys()) - set(['id']): + + if key == 'class': + r["class"] = "class_" + add_kv(old_doc=doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), cols=cols, min_col=min_col,) + elif getattr(self, key) is not None: + + saved_val = save( + getattr(self, key), + top=False, + base_url=self.id, + relative_uris=relative_uris, + keys=keys + [key], + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + + r[key] = saved_val + + add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + ) + + for key in set(self.attrs) - set(doc.lc.data.keys()) - set(['id']): + if key == 'class': + r["class"] = "{class_}" + add_kv(old_doc=doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), cols=cols, min_col=min_col,) + elif getattr(self, key) is not None: + saved_val = save( + getattr(self, key), + top=False, + base_url=self.id, + relative_uris=relative_uris, + keys=keys + [key], + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r[key] = saved_val + + add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + ) + """.format( class_=classname ) ) + else: + self.serializer.write( + """ + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + ) + if doc: + if u in doc: + keys.append(u) + if isinstance(doc.get(u), (CommentedMap, CommentedSeq)): + doc = doc.get(u) + line_numbers = get_line_numbers(doc) + min_col = get_min_col(line_numbers) + + for key in set(doc.lc.data.keys()) - set(['id']): + + if getattr(self, key) is not None: + + saved_val = save( + getattr(self, key), + top=False, + base_url=self.id, + relative_uris=relative_uris, + keys=keys + [key], + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + + r[key] = saved_val + + add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + ) + + for key in set(self.attrs) - set(doc.lc.data.keys()) - set(['id']): + + if getattr(self, key) is not None: + saved_val = save( + getattr(self, key), + top=False, + base_url=self.id, + relative_uris=relative_uris, + keys=keys + [key], + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r[key] = saved_val + + add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + ) + +""" + ) def end_class(self, classname: str, field_names: List[str]) -> None: """Signal that we are done with this class.""" @@ -571,6 +735,7 @@ def declare_field( {safename} = None """.format( safename=self.safe_name(name) + ) ) @@ -586,15 +751,12 @@ def declare_field( if self.{safename} is not None: u = save_relative_uri(self.{safename}, {baseurl}, {scoped_id}, {ref_scope}, relative_uris) r["{fieldname}"] = u - max_len = add_kv(old_doc = doc, new_doc = r, line_numbers = line_numbers, key = "{key_1}", val = r.get("{key_2}"), max_len = max_len, cols = cols) """.format( 
safename=self.safe_name(name), fieldname=shortname(name).strip(), baseurl=baseurl, scoped_id=fieldtype.scoped_id, ref_scope=fieldtype.ref_scope, - key_1=self.safe_name(name), - key_2=self.safe_name(name), ), 8, ) @@ -604,16 +766,9 @@ def declare_field( fmt( """ if self.{safename} is not None: - saved_val = save( - self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris, keys = keys + ["{fieldname}"] + r["{fieldname}"] = save( + self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris ) - - if type(saved_val) == list: - if len(saved_val) == 1: # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["{fieldname}"] = saved_val - - max_len = add_kv(old_doc = doc, new_doc = r, line_numbers = line_numbers, key = "{fieldname}", val = r.get("{fieldname}"), max_len = max_len, cols = cols) """.format( safename=self.safe_name(name), fieldname=shortname(name), @@ -623,6 +778,8 @@ def declare_field( ) ) + + def uri_loader( self, inner: TypeDef, diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 4abb089f5..a927be299 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -44,6 +44,7 @@ doc_line_info = CommentedMap() +inserted_line_info: Dict[int,int] = {} class LoadingOptions: @@ -233,33 +234,62 @@ def add_kv( line_numbers: Dict[Any, Dict[str, int]], key: str, val: Any, - max_len: int, cols: Dict[int, int], + min_col: int = 0 ) -> int: """Add key value pair into Commented Map. Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap, key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. 
""" - if key in line_numbers: # If the key to insert is in the original CommentedMap - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) - elif isinstance(val, (int, float, bool, str)): # If the value is hashable + if len(inserted_line_info.keys()) >= 1: + max_line = max(inserted_line_info.keys()) + 1 + else: + max_line = 0 + if key in line_numbers: # If the key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] + if line_info[0] not in inserted_line_info: + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) + inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] + else: + new_doc.lc.add_kv_line_col(key, [max_line, old_doc.lc.data[key][1], max_line + (max_line - old_doc.lc.data[key][2]), old_doc.lc.data[key][3]]) + elif isinstance(val, (int, float, str)) and not isinstance(val, bool): # If the value is hashable if val in line_numbers: # If the value is in the original CommentedMap line = line_numbers[val]["line"] + if line in inserted_line_info: + line = max_line if line in cols: col = max(line_numbers[val]["col"], cols[line]) else: col = line_numbers[val]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - cols[line] = col + len("id") + 2 - else: # If neither the key or value is in the original CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) - max_len += 1 - else: # If neither the key or value is in the original CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) - max_len += 1 - return max_len - + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len(key) + 2 + elif val + "?" 
in line_numbers: + line = line_numbers[val + "?"]["line"] + if line in inserted_line_info: + line = max_line + if line in cols: + col = max(line_numbers[val + "?"]["col"], cols[line]) + else: + col = line_numbers[val + "?"]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len(key) + 2 + # elif old_doc: + # if val in old_doc: + # index = old_doc.index(val) + # line_info = old_doc.lc.data[index] + # if line_info[0] not in inserted_line_info: + # new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) + # inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[index][1] + # else: + # new_doc.lc.add_kv_line_col(key, [max_line, old_doc.lc.data[index][1], max_line + (max_line - old_doc.lc.data[index][2]), old_doc.lc.data[index][3]]) + # inserted_line_info[max_line] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) + inserted_line_info[max_line] = min_col + len(key) + 2 + cols[max_line] = col + len(key) + 2 def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. @@ -281,23 +311,12 @@ def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: line_numbers[value]["col"] = doc.lc.data[key][3] return line_numbers - -def get_max_line_num(doc: CommentedMap) -> int: - """Get the max line number for a CommentedMap. - - Iterate through the the key with the highest line number until you reach a non-CommentedMap value or empty CommentedMap. 
- """ - max_line = 0 - max_key = "" - cur = doc - while isinstance(cur, CommentedMap) and len(cur) > 0: - for key in cur.lc.data.keys(): - if cur.lc.data[key][2] >= max_line: - max_line = cur.lc.data[key][2] - max_key = key - cur = cur[max_key] - return max_line + 1 - +def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> str: + min_col = 0 + for line in line_numbers: + if line_numbers[line]['col'] > min_col: + min_col = line_numbers[line]['col'] + return min_col def save( val: Any, From f4e098b5ae9e852db655ced769c78bf645ce5534 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Fri, 5 May 2023 15:17:49 -0400 Subject: [PATCH 27/44] Updating python_codegen and python_codegen_support for cleaner logic and less conditionals - as well as some bug fixes --- schema_salad/python_codegen.py | 218 +++++++++---------------- schema_salad/python_codegen_support.py | 116 +++++++++---- 2 files changed, 162 insertions(+), 172 deletions(-) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 306813f51..4cee8dbde 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -295,8 +295,10 @@ def save( if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + skipped = set() if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -306,6 +308,7 @@ def save( """ ) + if "class" in field_names: self.out.write( """ @@ -315,99 +318,20 @@ def save( """.format( class_=classname ) + ) self.serializer.write( """ - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - ) - if doc: - if u in doc: - keys.append(u) - if isinstance(doc.get(u), 
(CommentedMap, CommentedSeq)): - doc = doc.get(u) - line_numbers = get_line_numbers(doc) - min_col = get_min_col(line_numbers) - - for key in set(doc.lc.data.keys()) - set(['id']): - - if key == 'class': - r["class"] = "class_" - add_kv(old_doc=doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), cols=cols, min_col=min_col,) - elif getattr(self, key) is not None: - - saved_val = save( - getattr(self, key), - top=False, - base_url=self.id, - relative_uris=relative_uris, - keys=keys + [key], - ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - - r[key] = saved_val - - add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - ) - - for key in set(self.attrs) - set(doc.lc.data.keys()) - set(['id']): - if key == 'class': - r["class"] = "{class_}" - add_kv(old_doc=doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), cols=cols, min_col=min_col,) - elif getattr(self, key) is not None: - saved_val = save( - getattr(self, key), - top=False, - base_url=self.id, - relative_uris=relative_uris, - keys=keys + [key], - ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r[key] = saved_val - - add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - ) - + r["class"] = "{class_}" + add_kv(old_doc=doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), cols=cols, min_col=min_col,max_len=max_len) + skipped.add("class") """.format( class_=classname ) ) - else: - self.serializer.write( + if "id" in field_names: + self.serializer.write( """ if self.id is not None: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -420,6 
+344,7 @@ def save( val=r.get("id"), cols=cols, min_col=min_col, + max_len=max_len ) if doc: if u in doc: @@ -428,67 +353,76 @@ def save( doc = doc.get(u) line_numbers = get_line_numbers(doc) min_col = get_min_col(line_numbers) - - for key in set(doc.lc.data.keys()) - set(['id']): - - if getattr(self, key) is not None: - - saved_val = save( - getattr(self, key), - top=False, - base_url=self.id, - relative_uris=relative_uris, - keys=keys + [key], - ) + skipped.add("id") +""" + ) + self.serializer.write( + """ + for key in set(self.attrs) - skipped: + if isinstance(key, str): + if getattr(self, key) is not None: + saved_val = save( + getattr(self, key), + top=False, + base_url=self.id, + relative_uris=relative_uris, + keys=keys + [key], + ) - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - - r[key] = saved_val - - add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - ) - - for key in set(self.attrs) - set(doc.lc.data.keys()) - set(['id']): - - if getattr(self, key) is not None: - saved_val = save( - getattr(self, key), - top=False, - base_url=self.id, - relative_uris=relative_uris, - keys=keys + [key], - ) + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r[key] = saved_val + + add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r[key] = saved_val - - add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - ) +""" + ) + else: + 
self.serializer.write( + """ + for key in set(self.attrs) - skipped: + if isinstance(key, str): + if getattr(self, key) is not None: + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + r[key] = saved_val + + add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) """ - ) + ) + def end_class(self, classname: str, field_names: List[str]) -> None: """Signal that we are done with this class.""" diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 1b18b4123..a7847fc87 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -245,33 +245,54 @@ def load_field(val, fieldtype, baseuri, loadingOptions): Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] - def add_kv( old_doc: CommentedMap, new_doc: CommentedMap, line_numbers: Dict[Any, Dict[str, int]], key: str, val: Any, + max_len: int, cols: Dict[int, int], - min_col: int = 0 + min_col: int = 0, ) -> int: """Add key value pair into Commented Map. Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap, key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. 
""" + # print("-----------") + # print(key) + # print(val) + # print(max_len) + # print(line_numbers) + # print(old_doc) + # print("-----------") if len(inserted_line_info.keys()) >= 1: max_line = max(inserted_line_info.keys()) + 1 else: max_line = 0 - if key in line_numbers: # If the key to insert is in the original CommentedMap as a key + if ( + key in line_numbers + ): # If the key to insert is in the original CommentedMap as a key line_info = old_doc.lc.data[key] if line_info[0] not in inserted_line_info: new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] else: - new_doc.lc.add_kv_line_col(key, [max_line, old_doc.lc.data[key][1], max_line + (max_line - old_doc.lc.data[key][2]), old_doc.lc.data[key][3]]) - elif isinstance(val, (int, float, str)) and not isinstance(val, bool): # If the value is hashable + new_doc.lc.add_kv_line_col( + key, + [ + max_line, + old_doc.lc.data[key][1], + max_line + (max_line - old_doc.lc.data[key][2]), + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[max_line] = old_doc.lc.data[key][1] + return max_len + elif isinstance(val, (int, float, str)) and not isinstance( + val, bool + ): # If the value is hashable if val in line_numbers: # If the value is in the original CommentedMap line = line_numbers[val]["line"] if line in inserted_line_info: @@ -282,32 +303,48 @@ def add_kv( col = line_numbers[val]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len(key) + 2 + cols[line] = col + len("id") + 2 + return max_len elif val + "?" 
in line_numbers: - line = line_numbers[val + "?"]["line"] - if line in inserted_line_info: - line = max_line - if line in cols: - col = max(line_numbers[val + "?"]["col"], cols[line]) + line = line_numbers[val + "?"]["line"] + if line in inserted_line_info: + line = max_line + if line in cols: + col = max(line_numbers[val + "?"]["col"], cols[line]) + else: + col = line_numbers[val + "?"]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len("id") + 2 + return max_len + elif old_doc: + if val in old_doc: + index = old_doc.index(val) + line_info = old_doc.lc.data[index] + if line_info[0] not in inserted_line_info: + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) + inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ + index + ][1] else: - col = line_numbers[val + "?"]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len(key) + 2 - # elif old_doc: - # if val in old_doc: - # index = old_doc.index(val) - # line_info = old_doc.lc.data[index] - # if line_info[0] not in inserted_line_info: - # new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) - # inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[index][1] - # else: - # new_doc.lc.add_kv_line_col(key, [max_line, old_doc.lc.data[index][1], max_line + (max_line - old_doc.lc.data[index][2]), old_doc.lc.data[index][3]]) - # inserted_line_info[max_line] = old_doc.lc.data[index][1] + new_doc.lc.add_kv_line_col( + key, + [ + max_line, + old_doc.lc.data[index][1], + max_line + (max_line - old_doc.lc.data[index][2]), + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[max_line] = old_doc.lc.data[index][1] # If neither the key or value is in the original CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) + 
new_doc.lc.add_kv_line_col( + key, [max_line, min_col, max_line, min_col + len(key) + 2] + ) inserted_line_info[max_line] = min_col + len(key) + 2 - cols[max_line] = col + len(key) + 2 + + return max_len + 1 + def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. @@ -316,7 +353,7 @@ def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: only save value info if value is hashable. """ line_numbers: Dict[Any, Dict[str, int]] = {} - if isinstance(doc, dict) or doc is None: + if doc is None: return {} for key, value in doc.lc.data.items(): line_numbers[key] = {} @@ -329,13 +366,32 @@ def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: line_numbers[value]["col"] = doc.lc.data[key][3] return line_numbers + def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> str: min_col = 0 for line in line_numbers: - if line_numbers[line]['col'] > min_col: - min_col = line_numbers[line]['col'] + if line_numbers[line]["col"] > min_col: + min_col = line_numbers[line]["col"] return min_col + +def get_max_line_num(doc: CommentedMap) -> int: + """Get the max line number for a CommentedMap. + + Iterate through the the key with the highest line number until you reach a non-CommentedMap value or empty CommentedMap. 
+ """ + max_line = 0 + max_key = "" + cur = doc + while isinstance(cur, CommentedMap) and len(cur) > 0: + for key in cur.lc.data.keys(): + # print(cur.lc.data[key][2]) + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line + 1 + def save( val: Any, top: bool = True, From 8263fdb46c9a379f610d4569425f88f3edce2944 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 8 May 2023 10:23:18 -0400 Subject: [PATCH 28/44] updating for consistent line numbers --- schema_salad/python_codegen.py | 218 ++++++++++++++----------- schema_salad/python_codegen_support.py | 51 +++--- 2 files changed, 149 insertions(+), 120 deletions(-) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 4cee8dbde..67ffe3fd9 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -274,7 +274,11 @@ def fromDoc( self.serializer.write( """ def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -308,7 +312,6 @@ def save( """ ) - if "class" in field_names: self.out.write( """ @@ -318,14 +321,11 @@ def save( """.format( class_=classname ) - ) self.serializer.write( """ r["class"] = "{class_}" - add_kv(old_doc=doc, new_doc=r, line_numbers=line_numbers, key="class", val=r.get("class"), cols=cols, min_col=min_col,max_len=max_len) - skipped.add("class") """.format( class_=classname ) @@ -333,96 +333,99 @@ def save( if "id" in field_names: self.serializer.write( """ - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len - ) - if doc: - if u in doc: - 
keys.append(u) - if isinstance(doc.get(u), (CommentedMap, CommentedSeq)): - doc = doc.get(u) - line_numbers = get_line_numbers(doc) - min_col = get_min_col(line_numbers) - skipped.add("id") -""" - ) - self.serializer.write( - """ - for key in set(self.attrs) - skipped: + for key in doc.lc.data.keys(): if isinstance(key, str): - if getattr(self, key) is not None: - saved_val = save( - getattr(self, key), - top=False, - base_url=self.id, - relative_uris=relative_uris, - keys=keys + [key], - ) + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=self.id, + relative_uris=relative_uris, + keys=keys + [key], + ) + + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): # If the returned value is a list of size 1, just save the value in the list + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r[key] = saved_val - - add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len - ) -""" - ) - else: - self.serializer.write( - """ - for key in set(self.attrs) - skipped: - if isinstance(key, str): - if getattr(self, key) is not None: - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - ) - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r[key] = saved_val - - add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - 
min_col=min_col, - max_len=max_len - ) """ - ) - + ) +# self.serializer.write( +# """ +# if self.id is not None and "id" not in r: +# u = save_relative_uri(self.id, base_url, True, None, relative_uris) +# r["id"] = u +# add_kv( +# old_doc=doc, +# new_doc=r, +# line_numbers=line_numbers, +# key="id", +# val=r.get("id"), +# cols=cols, +# min_col=min_col, +# max_len=max_len +# ) +# if doc: +# if u in doc: +# keys.append(u) +# if isinstance(doc.get(u), (CommentedMap, CommentedSeq)): +# doc = doc.get(u) +# line_numbers = get_line_numbers(doc) +# min_col = get_min_col(line_numbers) +# """ +# ) +# else: +# self.serializer.write( +# """ +# for key in self.ordered_attrs.keys(): +# if isinstance(key, str) and key not in r: +# if getattr(self, key) is not None: +# saved_val = save( +# getattr(self, key), +# top=False, +# base_url=base_url, +# relative_uris=relative_uris, +# keys=keys + [key], +# ) + +# if type(saved_val) == list: +# if ( +# len(saved_val) == 1 +# ): # If the returned value is a list of size 1, just save the value in the list +# saved_val = saved_val[0] +# r[key] = saved_val + +# add_kv( +# old_doc=doc, +# new_doc=r, +# line_numbers=line_numbers, +# key=key, +# val=r.get(key), +# cols=cols, +# min_col=min_col, +# max_len=max_len +# ) +# """ +# ) def end_class(self, classname: str, field_names: List[str]) -> None: """Signal that we are done with this class.""" @@ -478,6 +481,14 @@ def end_class(self, classname: str, field_names: List[str]) -> None: fmt(f"""attrs = frozenset(["{'", "'.join(field_names)}"])\n""", 4) ) + # names = [] + # for name in field_names: + # names.append("('%s', 0)"%name) + + # self.serializer.write( + # fmt(f"""ordered_attrs = CommentedMap(["{', '.join(names)}])\n""", 4) + # ) + safe_init_fields = [ self.safe_name(f) for f in field_names if f != "class" ] # type: List[str] @@ -670,7 +681,6 @@ def declare_field( {safename} = None """.format( safename=self.safe_name(name) - ) ) @@ -683,9 +693,19 @@ def declare_field( self.serializer.write( 
fmt( """ -if self.{safename} is not None: +if self.{safename} is not None and "{fieldname}" not in r: u = save_relative_uri(self.{safename}, {baseurl}, {scoped_id}, {ref_scope}, relative_uris) r["{fieldname}"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="{fieldname}", + val=r.get("{fieldname}"), + cols=cols, + min_col=min_col, + max_len=max_len + ) """.format( safename=self.safe_name(name), fieldname=shortname(name).strip(), @@ -700,10 +720,20 @@ def declare_field( self.serializer.write( fmt( """ -if self.{safename} is not None: +if self.{safename} is not None and "{fieldname}" not in r: r["{fieldname}"] = save( self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="{fieldname}", + val=r.get("{fieldname}"), + cols=cols, + min_col=min_col, + max_len=max_len + ) """.format( safename=self.safe_name(name), fieldname=shortname(name), @@ -713,8 +743,6 @@ def declare_field( ) ) - - def uri_loader( self, inner: TypeDef, diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index a7847fc87..14ee8c100 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -44,7 +44,7 @@ doc_line_info = CommentedMap() -inserted_line_info: Dict[int,int] = {} +inserted_line_info: Dict[int, int] = {} class LoadingOptions: @@ -245,6 +245,7 @@ def load_field(val, fieldtype, baseuri, loadingOptions): Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] ] + def add_kv( old_doc: CommentedMap, new_doc: CommentedMap, @@ -257,16 +258,10 @@ def add_kv( ) -> int: """Add key value pair into Commented Map. - Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap, - key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. 
+ Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers + for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, + and max col value taken for each line. """ - # print("-----------") - # print(key) - # print(val) - # print(max_len) - # print(line_numbers) - # print(old_doc) - # print("-----------") if len(inserted_line_info.keys()) >= 1: max_line = max(inserted_line_info.keys()) + 1 else: @@ -305,18 +300,19 @@ def add_kv( inserted_line_info[line] = col + len(key) + 2 cols[line] = col + len("id") + 2 return max_len - elif val + "?" in line_numbers: - line = line_numbers[val + "?"]["line"] - if line in inserted_line_info: - line = max_line - if line in cols: - col = max(line_numbers[val + "?"]["col"], cols[line]) - else: - col = line_numbers[val + "?"]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len + elif isinstance(val, str): + if val + "?" in line_numbers: + line = line_numbers[val + "?"]["line"] + if line in inserted_line_info: + line = max_line + if line in cols: + col = max(line_numbers[val + "?"]["col"], cols[line]) + else: + col = line_numbers[val + "?"]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len("id") + 2 + return max_len elif old_doc: if val in old_doc: index = old_doc.index(val) @@ -355,6 +351,8 @@ def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: line_numbers: Dict[Any, Dict[str, int]] = {} if doc is None: return {} + if doc.lc.data is None: + return {} for key, value in doc.lc.data.items(): line_numbers[key] = {} @@ -378,7 +376,8 @@ def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> str: def get_max_line_num(doc: CommentedMap) -> int: """Get the max line number for a CommentedMap. 
- Iterate through the the key with the highest line number until you reach a non-CommentedMap value or empty CommentedMap. + Iterate through the the key with the highest line number until you reach a non-CommentedMap value + or empty CommentedMap. """ max_line = 0 max_key = "" @@ -392,6 +391,7 @@ def get_max_line_num(doc: CommentedMap) -> int: cur = cur[max_key] return max_line + 1 + def save( val: Any, top: bool = True, @@ -401,7 +401,8 @@ def save( ) -> save_type: """Save a val of any type. - Recursively calls save method from class if val is of type Saveable. Otherwise, saves val to CommentedMap or CommentedSeq + Recursively calls save method from class if val is of type Saveable. + Otherwise, saves val to CommentedMap or CommentedSeq. """ if keys is None: keys = [] @@ -428,7 +429,7 @@ def save( for i in range(0, len(val)): new_keys = keys if doc: - if i in doc: + if str(i) in doc: r.lc.data[i] = doc.lc.data[i] new_keys.append(i) r.append( From add86c6f9394330eb0ec8ffda8b6cc67ccebf061 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Thu, 11 May 2023 17:45:02 -0400 Subject: [PATCH 29/44] adding files for line number tests --- .../test_schema/test_outputs_before_inputs.cwl | 17 +++++++++++++++++ .../test_schema/test_secondary_files_dsl.cwl | 18 ++++++++++++++++++ .../tests/test_schema/test_type_dsl.cwl | 17 +++++++++++++++++ 3 files changed, 52 insertions(+) create mode 100644 schema_salad/tests/test_schema/test_outputs_before_inputs.cwl create mode 100644 schema_salad/tests/test_schema/test_secondary_files_dsl.cwl create mode 100644 schema_salad/tests/test_schema/test_type_dsl.cwl diff --git a/schema_salad/tests/test_schema/test_outputs_before_inputs.cwl b/schema_salad/tests/test_schema/test_outputs_before_inputs.cwl new file mode 100644 index 000000000..e1594c790 --- /dev/null +++ b/schema_salad/tests/test_schema/test_outputs_before_inputs.cwl @@ -0,0 +1,17 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +outputs: + hello_output: + type: 
File + outputBinding: + glob: hello-out.txt + +inputs: + files: + type: File + default: "script.py" + other_file: File + +stdout: hello-out.txt \ No newline at end of file diff --git a/schema_salad/tests/test_schema/test_secondary_files_dsl.cwl b/schema_salad/tests/test_schema/test_secondary_files_dsl.cwl new file mode 100644 index 000000000..1f6c712a4 --- /dev/null +++ b/schema_salad/tests/test_schema/test_secondary_files_dsl.cwl @@ -0,0 +1,18 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +inputs: + files: + type: File + default: "script.py" + other_file: File + +outputs: + hello_output: + type: File + secondaryFiles: ["inputB.txt", "inputC.txt?"] + outputBinding: + glob: hello-out.txt + +stdout: hello-out.txt diff --git a/schema_salad/tests/test_schema/test_type_dsl.cwl b/schema_salad/tests/test_schema/test_type_dsl.cwl new file mode 100644 index 000000000..5b822d812 --- /dev/null +++ b/schema_salad/tests/test_schema/test_type_dsl.cwl @@ -0,0 +1,17 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +inputs: + files: + type: File? 
+ default: "script.py" + other_file: File + +outputs: + hello_output: + type: File + outputBinding: + glob: hello-out.txt + +stdout: hello-out.txt From 625a3a5606d3ff6aa736988a92c9b52bd0c69ab6 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Thu, 11 May 2023 17:46:33 -0400 Subject: [PATCH 30/44] adding cwl python codegen files for tests and having them be ignored wherever metaschema.py is ignored --- .flake8 | 1 + Makefile | 15 +- schema_salad/tests/cwl_v1_0.py | 19739 ++++++++++++++++++++++++ schema_salad/tests/cwl_v1_1.py | 22729 ++++++++++++++++++++++++++++ schema_salad/tests/cwl_v1_2.py | 24809 +++++++++++++++++++++++++++++++ 5 files changed, 67286 insertions(+), 7 deletions(-) create mode 100644 schema_salad/tests/cwl_v1_0.py create mode 100644 schema_salad/tests/cwl_v1_1.py create mode 100644 schema_salad/tests/cwl_v1_2.py diff --git a/.flake8 b/.flake8 index 45fea3205..ed11a6ee7 100644 --- a/.flake8 +++ b/.flake8 @@ -8,3 +8,4 @@ extend-select = B9 per-file-ignores = schema_salad/metaschema.py:B950 schema_salad/tests/*.py:B011 + schema_salad/tests/cwl*.py:B950 diff --git a/Makefile b/Makefile index 4a57354c2..60a706109 100644 --- a/Makefile +++ b/Makefile @@ -27,6 +27,7 @@ EXTRAS=[pycodegen] # `SHELL=bash` doesn't work for some, so don't use BASH-isms like # `[[` conditional expressions. 
PYSOURCES=$(wildcard ${MODULE}/**.py ${MODULE}/avro/*.py ${MODULE}/tests/*.py) setup.py +EXCLUDE_FILES := schema_salad/metaschema.py schema_salad/tests/cwl_v1_0.py schema_salad/tests/cwl_v1_1.py schema_salad/tests/cwl_v1_2.py DEVPKGS=-rdev-requirements.txt -rtest-requirements.txt -rmypy-requirements.txt COVBASE=coverage run --append @@ -80,18 +81,18 @@ clean: FORCE # Linting and code style related targets ## sort_import : sorting imports using isort: https://github.com/timothycrosley/isort -sort_imports: $(filter-out schema_salad/metaschema.py,$(PYSOURCES)) mypy-stubs +sort_imports: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES)) mypy-stubs isort $^ -remove_unused_imports: $(filter-out schema_salad/metaschema.py,$(PYSOURCES)) +remove_unused_imports: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES)) autoflake --in-place --remove-all-unused-imports $^ pep257: pydocstyle ## pydocstyle : check Python docstring style -pydocstyle: $(filter-out schema_salad/metaschema.py,$(PYSOURCES)) +pydocstyle: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES)) pydocstyle --add-ignore=D100,D101,D102,D103 $^ || true -pydocstyle_report.txt: $(filter-out schema_salad/metaschema.py,$(PYSOURCES)) +pydocstyle_report.txt: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES)) pydocstyle setup.py $^ > $@ 2>&1 || true ## diff_pydocstyle_report : check Python docstring style for changed files only @@ -104,10 +105,10 @@ codespell: ## format : check/fix all code indentation and formatting (runs black) format: - black --exclude metaschema.py --exclude _version.py schema_salad setup.py mypy-stubs + black --exclude metaschema.py --exclude _version.py --exclude tests/cwl_v1_0.py --exclude tests/cwl_v1_1.py --exclude tests/cwl_v1_2.py schema_salad setup.py mypy-stubs format-check: - black --diff --check --exclude metaschema.py --exclude _version.py schema_salad setup.py mypy-stubs + black --diff --check --exclude metaschema.py --exclude _version.py --exclude tests/cwl_v1_0.py --exclude tests/cwl_v1_1.py --exclude 
tests/cwl_v1_2.py schema_salad setup.py mypy-stubs ## pylint : run static code analysis on Python code pylint: $(PYSOURCES) @@ -203,7 +204,7 @@ compute-metaschema-hash: shellcheck: FORCE shellcheck build-schema_salad-docker.sh release-test.sh -pyupgrade: $(filter-out schema_salad/metaschema.py,$(PYSOURCES)) +pyupgrade: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES)) pyupgrade --exit-zero-even-if-changed --py36-plus $^ release-test: FORCE diff --git a/schema_salad/tests/cwl_v1_0.py b/schema_salad/tests/cwl_v1_0.py new file mode 100644 index 000000000..51028e9cc --- /dev/null +++ b/schema_salad/tests/cwl_v1_0.py @@ -0,0 +1,19739 @@ +# +# This file was autogenerated using schema-salad-tool --codegen=python +# The code itself is released under the Apache 2.0 license and the help text is +# subject to the license of the original schema. +import copy +import logging +import os +import pathlib +import re +import tempfile +import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 +import xml.sax # nosec +from abc import ABC, abstractmethod +from io import StringIO +from typing import ( + Any, + Dict, + List, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit +from urllib.request import pathname2url + +from rdflib import Graph +from rdflib.plugins.parsers.notation3 import BadSyntax +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +from schema_salad.exceptions import SchemaSaladException, ValidationException +from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.sourceline import SourceLine, add_lc_filename +from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ + +_vocab: Dict[str, str] = {} +_rvocab: Dict[str, str] = {} + +_logger = logging.getLogger("salad") + + +IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] + + +doc_line_info = CommentedMap() 
+inserted_line_info: Dict[int, int] = {} + + +class LoadingOptions: + idx: IdxType + fileuri: Optional[str] + baseuri: str + namespaces: MutableMapping[str, str] + schemas: MutableSequence[str] + original_doc: Optional[Any] + addl_metadata: MutableMapping[str, Any] + fetcher: Fetcher + vocab: Dict[str, str] + rvocab: Dict[str, str] + cache: CacheType + imports: List[str] + includes: List[str] + + def __init__( + self, + fetcher: Optional[Fetcher] = None, + namespaces: Optional[Dict[str, str]] = None, + schemas: Optional[List[str]] = None, + fileuri: Optional[str] = None, + copyfrom: Optional["LoadingOptions"] = None, + original_doc: Optional[Any] = None, + addl_metadata: Optional[Dict[str, str]] = None, + baseuri: Optional[str] = None, + idx: Optional[IdxType] = None, + imports: Optional[List[str]] = None, + includes: Optional[List[str]] = None, + ) -> None: + """Create a LoadingOptions object.""" + self.original_doc = original_doc + + if idx is not None: + self.idx = idx + else: + self.idx = copyfrom.idx if copyfrom is not None else {} + + if fileuri is not None: + self.fileuri = fileuri + else: + self.fileuri = copyfrom.fileuri if copyfrom is not None else None + + if baseuri is not None: + self.baseuri = baseuri + else: + self.baseuri = copyfrom.baseuri if copyfrom is not None else "" + + if namespaces is not None: + self.namespaces = namespaces + else: + self.namespaces = copyfrom.namespaces if copyfrom is not None else {} + + if schemas is not None: + self.schemas = schemas + else: + self.schemas = copyfrom.schemas if copyfrom is not None else [] + + if addl_metadata is not None: + self.addl_metadata = addl_metadata + else: + self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} + + if imports is not None: + self.imports = imports + else: + self.imports = copyfrom.imports if copyfrom is not None else [] + + if includes is not None: + self.includes = includes + else: + self.includes = copyfrom.includes if copyfrom is not None else [] + + 
if fetcher is not None: + self.fetcher = fetcher + elif copyfrom is not None: + self.fetcher = copyfrom.fetcher + else: + import requests + from cachecontrol.caches import FileCache + from cachecontrol.wrapper import CacheControl + + root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) + session = CacheControl( + requests.Session(), + cache=FileCache(root / ".cache" / "salad"), + ) + self.fetcher: Fetcher = DefaultFetcher({}, session) + + self.cache = ( + self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} + ) + + self.vocab = _vocab + self.rvocab = _rvocab + + if namespaces is not None: + self.vocab = self.vocab.copy() + self.rvocab = self.rvocab.copy() + for k, v in namespaces.items(): + self.vocab[k] = v + self.rvocab[v] = k + + @property + def graph(self) -> Graph: + """Generate a merged rdflib.Graph from all entries in self.schemas.""" + graph = Graph() + if not self.schemas: + return graph + key = str(hash(tuple(self.schemas))) + if key in self.cache: + return cast(Graph, self.cache[key]) + for schema in self.schemas: + fetchurl = ( + self.fetcher.urljoin(self.fileuri, schema) + if self.fileuri is not None + else pathlib.Path(schema).resolve().as_uri() + ) + if fetchurl not in self.cache or self.cache[fetchurl] is True: + _logger.debug("Getting external schema %s", fetchurl) + try: + content = self.fetcher.fetch_text(fetchurl) + except Exception as e: + _logger.warning( + "Could not load extension schema %s: %s", fetchurl, str(e) + ) + continue + newGraph = Graph() + err_msg = "unknown error" + for fmt in ["xml", "turtle"]: + try: + newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) + self.cache[fetchurl] = newGraph + graph += newGraph + break + except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: + err_msg = str(e) + else: + _logger.warning( + "Could not load extension schema %s: %s", fetchurl, err_msg + ) + self.cache[key] = graph + return graph + + +class Saveable(ABC): + """Mark classes than 
have a save() and fromDoc() function.""" + + @classmethod + @abstractmethod + def fromDoc( + cls, + _doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Saveable": + """Construct this object from the result of yaml.load().""" + + @abstractmethod + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + ) -> CommentedMap: + """Convert this object to a JSON/YAML friendly dictionary.""" + + +def load_field(val, fieldtype, baseuri, loadingOptions): + # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) + result, metadata = _document_load_by_url( + fieldtype, + url, + loadingOptions, + ) + loadingOptions.imports.append(url) + return result + elif "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = loadingOptions.fetcher.urljoin( + loadingOptions.fileuri, val["$include"] + ) + val = loadingOptions.fetcher.fetch_text(url) + loadingOptions.includes.append(url) + return fieldtype.load(val, baseuri, loadingOptions) + + +save_type = Optional[ + Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] +] + + +def add_kv( + old_doc: CommentedMap, + new_doc: CommentedMap, + line_numbers: Dict[Any, Dict[str, int]], + key: str, + val: Any, + max_len: int, + cols: Dict[int, int], + min_col: int = 0, +) -> int: + """Add key value pair into Commented Map. + + Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers + for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, + and max col value taken for each line. 
+ """ + if len(inserted_line_info.keys()) >= 1: + max_line = max(inserted_line_info.keys()) + 1 + else: + max_line = 0 + if ( + key in line_numbers + ): # If the key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] + if line_info[0] not in inserted_line_info: + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) + inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] + else: + line = line_info[0] + while line in inserted_line_info.keys(): + line += 1 + new_doc.lc.add_kv_line_col( + key, + [ + line, + old_doc.lc.data[key][1], + line + (line - old_doc.lc.data[key][2]), + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[line] = old_doc.lc.data[key][1] + return max_len + elif isinstance(val, (int, float, str)) and not isinstance( + val, bool + ): # If the value is hashable + if val in line_numbers: # If the value is in the original CommentedMap + line = line_numbers[val]["line"] + if line in inserted_line_info: + line = max_line + if line in cols: + col = max(line_numbers[val]["col"], cols[line]) + else: + col = line_numbers[val]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len("id") + 2 + return max_len + elif isinstance(val, str): + if val + "?" 
in line_numbers: + line = line_numbers[val + "?"]["line"] + if line in inserted_line_info: + line = max_line + if line in cols: + col = max(line_numbers[val + "?"]["col"], cols[line]) + else: + col = line_numbers[val + "?"]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len("id") + 2 + return max_len + elif old_doc: + if val in old_doc: + index = old_doc.lc.data.index(val) + line_info = old_doc.lc.data[index] + if line_info[0] not in inserted_line_info: + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) + inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ + index + ][1] + else: + new_doc.lc.add_kv_line_col( + key, + [ + max_line, + old_doc.lc.data[index][1], + max_line + (max_line - old_doc.lc.data[index][2]), + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[max_line] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap (or value is not hashable) + new_doc.lc.add_kv_line_col( + key, [max_line, min_col, max_line, min_col + len(key) + 2] + ) + inserted_line_info[max_line] = min_col + len(key) + 2 + return max_len + 1 + + +def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: + """Get line numbers for kv pairs in CommentedMap. + + For each key/value pair in a CommentedMap, save the line/col info into a dictionary, + only save value info if value is hashable. 
+ """ + line_numbers: Dict[Any, Dict[str, int]] = {} + if doc is None: + return {} + if doc.lc.data is None: + return {} + for key, value in doc.lc.data.items(): + line_numbers[key] = {} + + line_numbers[key]["line"] = doc.lc.data[key][0] + line_numbers[key]["col"] = doc.lc.data[key][1] + if isinstance(value, (int, float, bool, str)): + line_numbers[value] = {} + line_numbers[value]["line"] = doc.lc.data[key][2] + line_numbers[value]["col"] = doc.lc.data[key][3] + return line_numbers + + +def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + min_col = 0 + for line in line_numbers: + if line_numbers[line]["col"] > min_col: + min_col = line_numbers[line]["col"] + return min_col + + +def get_max_line_num(doc: CommentedMap) -> int: + """Get the max line number for a CommentedMap. + + Iterate through the the key with the highest line number until you reach a non-CommentedMap value + or empty CommentedMap. + """ + max_line = 0 + max_key = "" + cur = doc + while isinstance(cur, CommentedMap) and len(cur) > 0: + for key in cur.lc.data.keys(): + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line + 1 + + +def save( + val: Any, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, +) -> save_type: + """Save a val of any type. + + Recursively calls save method from class if val is of type Saveable. + Otherwise, saves val to CommentedMap or CommentedSeq. 
+ """ + if keys is None: + keys = [] + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if isinstance(val, Saveable): + return val.save( + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys + ) + if isinstance(val, MutableSequence): + r = CommentedSeq() + r.lc.data = {} + for i in range(0, len(val)): + new_keys = keys + if doc: + if str(i) in doc: + r.lc.data[i] = doc.lc.data[i] + new_keys.append(i) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + ) + ) + return r + # return [ + # save(v, top=False, base_url=base_url, relative_uris=relative_uris) + # for v in val + # ] + if isinstance(val, MutableMapping): + newdict = CommentedMap() + new_keys = keys + for key in val: + if doc: + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + new_keys.append(key) + + newdict[key] = save( + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + ) + return newdict + # newdict = {} + # for key in val: + # newdict[key] = save( + # val[key], top=False, base_url=base_url, relative_uris=relative_uris + # ) + # return newdict + if val is None or isinstance(val, (int, float, bool, str)): + return val + raise Exception("Not Saveable: %s" % type(val)) + + +def save_with_metadata( + val: Any, + valLoadingOpts: LoadingOptions, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, +) -> save_type: + """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" + saved_val = save(val, top, base_url, relative_uris) + newdict: MutableMapping[str, Any] = {} + if isinstance(saved_val, MutableSequence): + newdict = {"$graph": saved_val} + elif isinstance(saved_val, MutableMapping): + newdict = saved_val + 
+ if valLoadingOpts.namespaces: + newdict["$namespaces"] = valLoadingOpts.namespaces + if valLoadingOpts.schemas: + newdict["$schemas"] = valLoadingOpts.schemas + if valLoadingOpts.baseuri: + newdict["$base"] = valLoadingOpts.baseuri + for k, v in valLoadingOpts.addl_metadata.items(): + if k not in newdict: + newdict[k] = v + + return newdict + + +def expand_url( + url, # type: str + base_url, # type: str + loadingOptions, # type: LoadingOptions + scoped_id=False, # type: bool + vocab_term=False, # type: bool + scoped_ref=None, # type: Optional[int] +): + # type: (...) -> str + if url in ("@id", "@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and ":" in url: + prefix = url.split(":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urlsplit(url) + + if ( + ( + bool(split.scheme) + and split.scheme in loadingOptions.fetcher.supported_schemes() + ) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urlsplit(base_url) + frg = "" + if bool(splitbase.fragment): + frg = splitbase.fragment + "/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urlsplit(base_url) + sp = splitbase.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException(f"Term '{url}' not in 
vocabulary") + + return url + + +class _Loader: + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + pass + + +class _AnyLoader(_Loader): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp): + # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None + self.tp = tp + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, self.tp): + raise ValidationException( + "Expected a {} but got {}".format( + self.tp.__class__.__name__, doc.__class__.__name__ + ) + ) + return doc + + def __repr__(self): # type: () -> str + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items): + # type: (_Loader) -> None + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableSequence): + raise ValidationException(f"Expected a list, was {type(doc)}") + r = [] # type: List[Any] + errors = [] # type: List[SchemaSaladException] + for i in range(0, len(doc)): + try: + lf = load_field( + doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions + ) + if isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + except ValidationException as e: + errors.append(e.with_sourceline(SourceLine(doc, i, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self): # type: () -> str + return f"array<{self.items}>" + + +class _EnumLoader(_Loader): + def __init__(self, symbols: Sequence[str], name: str) -> None: + self.symbols = symbols + self.name = name + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, 
str, LoadingOptions, Optional[str]) -> Any + if doc in self.symbols: + return doc + else: + raise ValidationException(f"Expected one of {self.symbols}") + + def __repr__(self): # type: () -> str + return self.name + + +class _SecondaryDSLLoader(_Loader): + def __init__(self, inner): + # type: (_Loader) -> None + self.inner = inner + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + r: List[Dict[str, Any]] = [] + if isinstance(doc, MutableSequence): + for d in doc: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict: Dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + "Missing pattern in secondaryFiles specification entry: {}".format( + d + ) + ) + new_dict["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None + ) + + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) + ) + r.append(new_dict) + + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." 
+ ) + elif isinstance(doc, MutableMapping): + new_dict = {} + doc_copy = copy.deepcopy(doc) + if "pattern" in doc_copy: + new_dict["pattern"] = doc_copy.pop("pattern") + else: + raise ValidationException( + "Missing pattern in secondaryFiles specification entry: {}".format( + doc + ) + ) + new_dict["required"] = ( + doc_copy.pop("required") if "required" in doc_copy else None + ) + + if len(doc_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + doc_copy + ) + ) + r.append(new_dict) + + elif isinstance(doc, str): + if doc.endswith("?"): + r.append({"pattern": doc[:-1], "required": False}) + else: + r.append({"pattern": doc}) + else: + raise ValidationException("Expected str or sequence of str") + return self.inner.load(r, baseuri, loadingOptions, docRoot) + + +class _RecordLoader(_Loader): + def __init__(self, classtype): + # type: (Type[Saveable]) -> None + self.classtype = classtype + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableMapping): + raise ValidationException(f"Expected a dict, was {type(doc)}") + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self): # type: () -> str + return str(self.classtype.__name__) + + +class _ExpressionLoader(_Loader): + def __init__(self, items: Type[str]) -> None: + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, str): + raise ValidationException(f"Expected a str, was {type(doc)}") + return doc + + +class _UnionLoader(_Loader): + def __init__(self, alternates: Sequence[_Loader]) -> None: + self.alternates = alternates + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + errors = [] + for t in self.alternates: + try: + return 
t.load(doc, baseuri, loadingOptions, docRoot=docRoot) + except ValidationException as e: + errors.append(ValidationException(f"tried {t} but", None, [e])) + raise ValidationException("", None, errors, "-") + + def __repr__(self): # type: () -> str + return " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__(self, inner, scoped_id, vocab_term, scoped_ref): + # type: (_Loader, bool, bool, Union[int, None]) -> None + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = scoped_ref + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + newdoc = [] + for i in doc: + if isinstance(i, str): + newdoc.append( + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + elif isinstance(doc, str): + doc = expand_url( + doc, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + return self.inner.load(doc, baseuri, loadingOptions) + + +class _TypeDSLLoader(_Loader): + typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") + + def __init__(self, inner, refScope): + # type: (_Loader, Union[int, None]) -> None + self.inner = inner + self.refScope = refScope + + def resolve( + self, + doc, # type: str + baseuri, # type: str + loadingOptions, # type: LoadingOptions + ): + # type: (...) 
-> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] + m = self.typeDSLregex.match(doc) + if m: + group1 = m.group(1) + assert group1 is not None # nosec + first = expand_url( + group1, baseuri, loadingOptions, False, True, self.refScope + ) + second = third = None + if bool(m.group(2)): + second = {"type": "array", "items": first} + # second = CommentedMap((("type", "array"), + # ("items", first))) + # second.lc.add_kv_line_col("type", lc) + # second.lc.add_kv_line_col("items", lc) + # second.lc.filename = filename + if bool(m.group(3)): + third = ["null", second or first] + # third = CommentedSeq(["null", second or first]) + # third.lc.add_kv_line_col(0, lc) + # third.lc.add_kv_line_col(1, lc) + # third.lc.filename = filename + return third or second or first + return doc + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + r = [] # type: List[Any] + for d in doc: + if isinstance(d, str): + resolved = self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, str): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions) + + +class _IdMapLoader(_Loader): + def __init__(self, inner, mapSubject, mapPredicate): + # type: (_Loader, str, Union[str, None]) -> None + self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableMapping): + r = [] # type: List[Any] + for k in sorted(doc.keys()): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = 
k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions) + + +def _document_load( + loader: _Loader, + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], + baseuri: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Tuple[Any, LoadingOptions]: + if isinstance(doc, str): + return _document_load_by_url( + loader, + loadingOptions.fetcher.urljoin(baseuri, doc), + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + if isinstance(doc, MutableMapping): + addl_metadata = {} + if addl_metadata_fields is not None: + for mf in addl_metadata_fields: + if mf in doc: + addl_metadata[mf] = doc[mf] + + docuri = baseuri + if "$base" in doc: + baseuri = doc["$base"] + + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, + namespaces=doc.get("$namespaces", None), + schemas=doc.get("$schemas", None), + baseuri=doc.get("$base", None), + addl_metadata=addl_metadata, + ) + + # doc = { + # k: v + # for k, v in doc.items() + # if k not in ("$namespaces", "$schemas", "$base") + # } + doc = copy.copy(doc) + if "$namespaces" in doc: + doc.pop("$namespaces") + if "$schemas" in doc: + doc.pop("$schemas") + if "$base" in doc: + doc.pop("$base") + + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc + + if "$graph" in doc: + loadingOptions.idx[baseuri] = ( + loader.load(doc["$graph"], baseuri, loadingOptions), + loadingOptions, + ) + else: + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), + loadingOptions, + ) + + if docuri != baseuri: + loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] + + return loadingOptions.idx[baseuri] 
+ if isinstance(doc, MutableSequence): + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions), + loadingOptions, + ) + return loadingOptions.idx[baseuri] + + raise ValidationException( + "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) + ) + + +def _document_load_by_url( + loader: _Loader, + url: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Tuple[Any, LoadingOptions]: + if url in loadingOptions.idx: + return loadingOptions.idx[url] + + doc_url, frg = urldefrag(url) + + text = loadingOptions.fetcher.fetch_text(doc_url) + if isinstance(text, bytes): + textIO = StringIO(text.decode("utf-8")) + else: + textIO = StringIO(text) + textIO.name = str(doc_url) + yaml = yaml_no_ts() + result = yaml.load(textIO) + add_lc_filename(result, doc_url) + + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) + + _document_load( + loader, + result, + doc_url, + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + return loadingOptions.idx[url] + + +def file_uri(path, split_frag=False): # type: (str, bool) -> str + if path.startswith("file://"): + return path + if split_frag: + pathsp = path.split("#", 2) + frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" + urlpath = pathname2url(str(pathsp[0])) + else: + urlpath = pathname2url(path) + frag = "" + if urlpath.startswith("//"): + return f"file:{urlpath}{frag}" + else: + return f"file://{urlpath}{frag}" + + +def prefix_url(url: str, namespaces: Dict[str, str]) -> str: + """Expand short forms into full URLs using the given namespace dictionary.""" + for k, v in namespaces.items(): + if url.startswith(v): + return k + ":" + url[len(v) :] + return url + + +def save_relative_uri( + uri: Any, + base_url: str, + scoped_id: bool, + ref_scope: Optional[int], + relative_uris: bool, +) -> Any: + """Convert any URI to a relative one, obeying the scoping rules.""" + if isinstance(uri, 
MutableSequence): + return [ + save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) + for u in uri + ] + elif isinstance(uri, str): + if not relative_uris or uri == base_url: + return uri + urisplit = urlsplit(uri) + basesplit = urlsplit(base_url) + if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: + if urisplit.path != basesplit.path: + p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) + if urisplit.fragment: + p = p + "#" + urisplit.fragment + return p + + basefrag = basesplit.fragment + "/" + if ref_scope: + sp = basefrag.split("/") + i = 0 + while i < ref_scope: + sp.pop() + i += 1 + basefrag = "/".join(sp) + + if urisplit.fragment.startswith(basefrag): + return urisplit.fragment[len(basefrag) :] + else: + return urisplit.fragment + return uri + else: + return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) + + +def shortname(inputid: str) -> str: + """ + Compute the shortname of a fully qualified identifier. + + See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. + """ + parsed_id = urlparse(inputid) + if parsed_id.fragment: + return parsed_id.fragment.split("/")[-1] + return parsed_id.path.split("/")[-1] + + +def parser_info() -> str: + return "org.w3id.cwl.v1_0" + + +class RecordField(Saveable): + """ + A field of a record. 
+ """ + + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.doc = doc + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordField): + return bool( + self.name == other.name + and self.doc == other.doc + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.doc, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + 
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `doc`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordField'", None, _errors__) + _constructed = cls( + name=name, + doc=doc, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in 
self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "doc", "type"]) + + +class RecordSchema(Saveable): + def __init__( + self, + type: Any, + 
fields: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordSchema): + return bool(self.fields == other.fields and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if 
_errors__: + raise ValidationException("Trying 'RecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type"]) + + +class EnumSchema(Saveable): + """ + Define an enumerated type. + + """ + + def __init__( + self, + symbols: Any, + type: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnumSchema): + return bool(self.symbols == other.symbols and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + 
SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnumSchema'", None, _errors__) + _constructed = cls( + symbols=symbols, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) 
is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type"]) + + +class ArraySchema(Saveable): + def __init__( + self, + items: Any, + type: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ArraySchema): + return bool(self.items 
== other.items and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = 
CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type"]) + + +class File(Saveable): + """ + Represents a file (or group of files when `secondaryFiles` is provided) that + will be accessible by tools using standard POSIX file system call API such as + open(2) and read(2). + + Files are represented as objects with `class` of `File`. File objects have + a number of properties that provide metadata about the file. + + The `location` property of a File is a URI that uniquely identifies the + file. Implementations must support the file:// URI scheme and may support + other schemes such as http://. The value of `location` may also be a + relative reference, in which case it must be resolved relative to the URI + of the document it appears in. Alternately to `location`, implementations + must also accept the `path` property on File, which must be a filesystem + path available on the same host as the CWL runner (for inputs) or the + runtime environment of a command line tool execution (for command line tool + outputs). + + If no `location` or `path` is specified, a file object must specify + `contents` with the UTF-8 text content of the file. This is a "file + literal". File literals do not correspond to external resources, but are + created on disk with `contents` with when needed for a executing a tool. + Where appropriate, expressions can return file literals to define new files + on a runtime. The maximum size of `contents` is 64 kilobytes. + + The `basename` property defines the filename on disk where the file is + staged. This may differ from the resource name. If not provided, + `basename` must be computed from the last path part of `location` and made + available to expressions. 
+ + The `secondaryFiles` property is a list of File or Directory objects that + must be staged in the same directory as the primary file. It is an error + for file names to be duplicated in `secondaryFiles`. + + The `size` property is the size in bytes of the File. It must be computed + from the resource and made available to expressions. The `checksum` field + contains a cryptographic hash of the file content for use it verifying file + contents. Implementations may, at user option, enable or disable + computation of the `checksum` field for performance or other reasons. + However, the ability to compute output checksums is required to pass the + CWL conformance test suite. + + When executing a CommandLineTool, the files and secondary files may be + staged to an arbitrary directory, but must use the value of `basename` for + the filename. The `path` property must be file path in the context of the + tool execution runtime (local to the compute node, or within the executing + container). All computed properties should be available to expressions. + File literals also must be staged and `path` must be set. + + When collecting CommandLineTool outputs, `glob` matching returns file paths + (with the `path` property) and the derived properties. This can all be + modified by `outputEval`. Alternately, if the file `cwl.output.json` is + present in the output, `outputBinding` is ignored. + + File objects in the output must provide either a `location` URI or a `path` + property in the context of the tool execution runtime (local to the compute + node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via + `location` (the expression tool does not have access to files on disk so + `path` is meaningless) or as file literals. It is legal to return a file + object with an existing `location` but a different `basename`. 
The + `loadContents` field of ExpressionTool inputs behaves the same as on + CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + dirname: Optional[Any] = None, + nameroot: Optional[Any] = None, + nameext: Optional[Any] = None, + checksum: Optional[Any] = None, + size: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + format: Optional[Any] = None, + contents: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, File): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum == other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, 
+ self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "File": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "File": + raise ValidationException("Not a File") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "dirname" in _doc: + try: + dirname = load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + ) + ) + else: + dirname = None + if "nameroot" in _doc: + try: + nameroot = load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, 
+ baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], + ) + ) + else: + nameroot = None + if "nameext" in _doc: + try: + nameext = load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + ) + ) + else: + nameext = None + if "checksum" in _doc: + try: + checksum = load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + ) + ) + else: + checksum = None + if "size" in _doc: + try: + size = load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [e], + ) + ) + else: + size = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, 
"format", str), + [e], + ) + ) + else: + format = None + if "contents" in _doc: + try: + contents = load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [e], + ) + ) + else: + contents = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'File'", None, _errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = 
get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "File" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dirname is not None and "dirname" not in r: + r["dirname"] = save( + self.dirname, 
top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dirname", + val=r.get("dirname"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.nameroot is not None and "nameroot" not in r: + r["nameroot"] = save( + self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameroot", + val=r.get("nameroot"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.nameext is not None and "nameext" not in r: + r["nameext"] = save( + self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameext", + val=r.get("nameext"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.checksum is not None and "checksum" not in r: + r["checksum"] = save( + self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="checksum", + val=r.get("checksum"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.size is not None and "size" not in r: + r["size"] = save( + self.size, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="size", + val=r.get("size"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = 
save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.contents is not None and "contents" not in r: + r["contents"] = save( + self.contents, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="contents", + val=r.get("contents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with `class` of `Directory`. Directory objects have + a number of properties that provide metadata about the directory. + + The `location` property of a Directory is a URI that uniquely identifies + the directory. Implementations must support the file:// URI scheme and may + support other schemes such as http://. Alternately to `location`, + implementations must also accept the `path` property on Directory, which + must be a filesystem path available on the same host as the CWL runner (for + inputs) or the runtime environment of a command line tool execution (for + command line tool outputs). + + A Directory object may have a `listing` field. This is a list of File and + Directory objects that are contained in the Directory. 
For each entry in + `listing`, the `basename` property defines the name of the File or + Subdirectory when staged to disk. If `listing` is not provided, the + implementation must have some way of fetching the Directory listing at + runtime based on the `location` field. + + If a Directory does not have `location`, it is a Directory literal. A + Directory literal must provide `listing`. Directory literals must be + created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied + relationship in their `location`. For example, a Directory listing may + contain two files located on different hosts. It is the responsibility of + the runtime to ensure that those files are staged to disk appropriately. + Secondary files associated with files in `listing` must also be staged to + the same Directory. + + When executing a CommandLineTool, Directories must be recursively staged + first and have local values of `path` assigned. + + Directory objects in CommandLineTool output must provide either a + `location` URI or a `path` property in the context of the tool execution + runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + Name conflicts (the same `basename` appearing multiple times in `listing` + or in any entry in `secondaryFiles` in the listing) is a fatal error. 
+ + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + listing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "Directory" + self.location = location + self.path = path + self.basename = basename + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Directory): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.listing == other.listing + ) + return False + + def __hash__(self) -> int: + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Directory": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Directory": + raise ValidationException("Not a Directory") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "listing" in _doc: + try: + listing = load_field( + _doc.get("listing"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + else: + listing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Directory'", None, _errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < 
len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Directory" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + 
new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.listing is not None and "listing" not in r: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "location", "path", "basename", "listing"]) + + +class SchemaBase(Saveable): + pass + + +class Parameter(SchemaBase): + """ + Define an input or output parameter to a process. + + """ + + pass + + +class InputBinding(Saveable): + pass + + +class OutputBinding(Saveable): + pass + + +class InputSchema(SchemaBase): + pass + + +class OutputSchema(SchemaBase): + pass + + +class InputRecordField(RecordField): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + inputBinding: Optional[Any] = None, + label: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.doc = doc + self.type = type + self.inputBinding = inputBinding + self.label = label + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordField): + return bool( + self.name == other.name + and self.doc == other.doc + and self.type == other.type + and self.inputBinding == other.inputBinding + and self.label == other.label + 
) + return False + + def __hash__(self) -> int: + return hash((self.name, self.doc, self.type, self.inputBinding, self.label)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `doc`, `type`, `inputBinding`, `label`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputRecordField'", None, _errors__) + _constructed = cls( + name=name, + doc=doc, + type=type, + inputBinding=inputBinding, + label=label, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if 
relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( 
+ old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "doc", "type", "inputBinding", "label"]) + + +class InputRecordSchema(RecordSchema, InputSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename 
= doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: 
+ raise ValidationException("Trying 'InputRecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + label=label, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + 
r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "name"]) + + +class InputEnumSchema(EnumSchema, InputSchema): + def __init__( + self, + symbols: Any, + type: Any, + label: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = 
LoadingOptions() + self.symbols = symbols + self.type = type + self.label = label + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputEnumSchema): + return bool( + self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type, self.label, self.name, self.inputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label 
= load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputEnumSchema'", None, _errors__) + _constructed = cls( + symbols=symbols, + type=type, + label=label, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + 
r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) 
+ if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type", "label", "name", "inputBinding"]) + + +class InputArraySchema(ArraySchema, InputSchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.inputBinding)) + + @classmethod + def 
fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = 
_doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + label=label, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + 
cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "inputBinding"]) + + +class OutputRecordField(RecordField): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = 
extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.doc = doc + self.type = type + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordField): + return bool( + self.name == other.name + and self.doc == other.doc + and self.type == other.type + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.doc, self.type, self.outputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + 
typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `doc`, `type`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordField'", None, _errors__) + _constructed = cls( + name=name, + doc=doc, + type=type, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and 
isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, 
+ line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "doc", "type", "outputBinding"]) + + +class OutputRecordSchema(RecordSchema, OutputSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "fields" in _doc: + try: + fields = load_field( + 
_doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + label=label, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, 
int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label"]) + + +class OutputEnumSchema(EnumSchema, OutputSchema): + def __init__( + self, + symbols: Any, + type: Any, + label: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + self.label = label + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputEnumSchema): + return bool( + self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type, self.label, self.outputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = 
load_field( + _doc.get("type"), + typedsl_Enum_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputEnumSchema'", None, _errors__) + _constructed = cls( + symbols=symbols, + type=type, + label=label, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if 
isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, 
top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type", "label", "outputBinding"]) + + +class OutputArraySchema(ArraySchema, OutputSchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.outputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: 
Optional[str] = None, + ) -> "OutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field 
`{}`, expected one of: `items`, `type`, `label`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + label=label, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.items is not 
None and "items" not in r: + r["items"] = save( + self.items, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "outputBinding"]) + + +class InputParameter(Parameter): + def __init__( + self, + id: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + format: Optional[Any] = None, + inputBinding: Optional[Any] = None, + default: Optional[Any] = None, + type: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: 
Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.inputBinding = inputBinding + self.default = default + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.inputBinding == other.inputBinding + and self.default == other.default + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.inputBinding, + self.default, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + 
_doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + 
loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + if "type" in _doc: + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + else: + type = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputParameter'", None, _errors__) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + inputBinding=inputBinding, + default=default, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + 
loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = 
add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + 
new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "inputBinding", + "default", + "type", + ] + ) + + +class OutputParameter(Parameter): + def __init__( + self, + id: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + outputBinding: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = 
extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.outputBinding = outputBinding + self.format = format + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.outputBinding == other.outputBinding + and self.format == other.format + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.outputBinding, + self.format, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), 
+ [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + extension_fields: 
Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputParameter'", None, _errors__) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + outputBinding=outputBinding, + format=format, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for 
ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + 
self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "outputBinding", + "format", + ] + ) + + +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled + before executing a process. See [`Process.hints`](#process) and + [`Process.requirements`](#process). + + Process requirements are the primary mechanism for specifying extensions to + the CWL core specification. 
+ + """ + + pass + + +class Process(Saveable): + """ + + The base executable type in CWL is the `Process` object defined by the + document. Note that the `Process` object is abstract and cannot be + directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. + If this requirement is not present, the workflow platform must not perform expression + interpolatation. + + """ + + def __init__( + self, + expressionLib: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InlineJavascriptRequirement" + self.expressionLib = expressionLib + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InlineJavascriptRequirement): + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.expressionLib)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InlineJavascriptRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InlineJavascriptRequirement": + raise ValidationException("Not a InlineJavascriptRequirement") + + if "expressionLib" in _doc: + try: + expressionLib = load_field( + _doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `expressionLib` field is not valid 
because:", + SourceLine(_doc, "expressionLib", str), + [e], + ) + ) + else: + expressionLib = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) + _constructed = cls( + expressionLib=expressionLib, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InlineJavascriptRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + 
[key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.expressionLib is not None and "expressionLib" not in r: + r["expressionLib"] = save( + self.expressionLib, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expressionLib", + val=r.get("expressionLib"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "expressionLib"]) + + +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when + interpreting the `inputs` and `outputs` fields. When a `type` field + contain a IRI, the implementation must check if the type is defined in + `schemaDefs` and use that definition. If the type is not found in + `schemaDefs`, it is an error. The entries in `schemaDefs` must be + processed in the order listed such that later schema definitions may refer + to earlier schema definitions. 
+ + """ + + def __init__( + self, + types: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SchemaDefRequirement" + self.types = types + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.types)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SchemaDefRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SchemaDefRequirement": + raise ValidationException("Not a SchemaDefRequirement") + + try: + types = load_field( + _doc.get("types"), + array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SchemaDefRequirement'", None, _errors__) + _constructed = cls( + types=types, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SchemaDefRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.types is not None and "types" not in r: + r["types"] = save( + self.types, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="types", + val=r.get("types"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "types"]) + + +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment + by the workflow platform when executing the command line tool. May be the + result of executing an expression, such as getting a parameter from input. + + """ + + def __init__( + self, + envName: Any, + envValue: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.envName = envName + self.envValue = envValue + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvironmentDef): + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) + return False + + def __hash__(self) -> int: + return hash((self.envName, self.envValue)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvironmentDef": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + envName = load_field( + _doc.get("envName"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), + [e], + ) + ) + try: + envValue = load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except 
ValidationException as e: + _errors__.append( + ValidationException( + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvironmentDef'", None, _errors__) + _constructed = cls( + envName=envName, + envValue=envValue, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + 
keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.envName is not None and "envName" not in r: + r["envName"] = save( + self.envName, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envName", + val=r.get("envName"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.envValue is not None and "envValue" not in r: + r["envValue"] = save( + self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envValue", + val=r.get("envValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["envName", "envValue"]) + + +class CommandLineBinding(InputBinding): + """ + + When listed under `inputBinding` in the input schema, the term + "value" refers to the the corresponding value in the input object. For + binding objects listed in `CommandLineTool.arguments`, the term "value" + refers to the effective value after evaluating `valueFrom`. + + The binding behavior when building the command line depends on the data + type of the value. If there is a mismatch between the type described by + the input schema and the effective value, such as resulting from an + expression evaluation, an implementation must use the data type of the + effective value. 
+ + - **string**: Add `prefix` and the string to the command line. + + - **number**: Add `prefix` and decimal representation to command line. + + - **boolean**: If true, add `prefix` to the command line. If false, add + nothing. + + - **File**: Add `prefix` and the value of + [`File.path`](#File) to the command line. + + - **Directory**: Add `prefix` and the value of + [`Directory.path`](#Directory) to the command line. + + - **array**: If `itemSeparator` is specified, add `prefix` and the join + the array into a single string with `itemSeparator` separating the + items. Otherwise first add `prefix`, then recursively process + individual elements. + If the array is empty, it does not add anything to command line. + + - **object**: Add `prefix` only, and recursively add object fields for + which `inputBinding` is specified. + + - **null**: Add nothing. + + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + position: Optional[Any] = None, + prefix: Optional[Any] = None, + separate: Optional[Any] = None, + itemSeparator: Optional[Any] = None, + valueFrom: Optional[Any] = None, + shellQuote: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBinding): + return bool( + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix + and self.separate == other.separate + and self.itemSeparator == other.itemSeparator 
+ and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "position" in _doc: + try: + position = load_field( + _doc.get("position"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), + [e], + ) + ) + else: + position = None + if "prefix" in _doc: + try: + prefix = load_field( + _doc.get("prefix"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), + [e], + ) + ) + else: + prefix = None + if "separate" in _doc: + try: + separate = load_field( + _doc.get("separate"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), + [e], + ) + ) + else: + separate = 
None + if "itemSeparator" in _doc: + try: + itemSeparator = load_field( + _doc.get("itemSeparator"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [e], + ) + ) + else: + itemSeparator = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + if "shellQuote" in _doc: + try: + shellQuote = load_field( + _doc.get("shellQuote"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [e], + ) + ) + else: + shellQuote = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: 
bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.position is not None and "position" not in r: + r["position"] = save( + self.position, 
top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="position", + val=r.get("position"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.prefix is not None and "prefix" not in r: + r["prefix"] = save( + self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="prefix", + val=r.get("prefix"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.separate is not None and "separate" not in r: + r["separate"] = save( + self.separate, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="separate", + val=r.get("separate"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.itemSeparator is not None and "itemSeparator" not in r: + r["itemSeparator"] = save( + self.itemSeparator, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="itemSeparator", + val=r.get("itemSeparator"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.shellQuote is not None and "shellQuote" not in r: + r["shellQuote"] = save( + self.shellQuote, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="shellQuote", + val=r.get("shellQuote"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory 
level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "loadContents", + "position", + "prefix", + "separate", + "itemSeparator", + "valueFrom", + "shellQuote", + ] + ) + + +class CommandOutputBinding(OutputBinding): + """ + Describes how to generate an output parameter based on the files produced + by a CommandLineTool. + + The output parameter value is generated by applying these operations in the + following order: + + - glob + - loadContents + - outputEval + - secondaryFiles + + """ + + def __init__( + self, + glob: Optional[Any] = None, + loadContents: Optional[Any] = None, + outputEval: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.glob = glob + self.loadContents = loadContents + self.outputEval = outputEval + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputBinding): + return bool( + self.glob == other.glob + and self.loadContents == other.loadContents + and self.outputEval == other.outputEval + ) + return False + + def __hash__(self) -> int: + return hash((self.glob, self.loadContents, self.outputEval)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "glob" in _doc: + try: + glob = load_field( + _doc.get("glob"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + baseuri, 
+ loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), + [e], + ) + ) + else: + glob = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "outputEval" in _doc: + try: + outputEval = load_field( + _doc.get("outputEval"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), + [e], + ) + ) + else: + outputEval = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `glob`, `loadContents`, `outputEval`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandOutputBinding'", None, _errors__) + _constructed = cls( + glob=glob, + loadContents=loadContents, + outputEval=outputEval, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = 
doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.glob is not None and "glob" not in r: + r["glob"] = save( + self.glob, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="glob", + val=r.get("glob"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputEval is not None and "outputEval" not in r: + r["outputEval"] = save( 
+ self.outputEval, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputEval", + val=r.get("outputEval"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["glob", "loadContents", "outputEval"]) + + +class CommandInputRecordField(InputRecordField): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + inputBinding: Optional[Any] = None, + label: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.doc = doc + self.type = type + self.inputBinding = inputBinding + self.label = label + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordField): + return bool( + self.name == other.name + and self.doc == other.doc + and self.type == other.type + and self.inputBinding == other.inputBinding + and self.label == other.label + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.doc, self.type, self.inputBinding, self.label)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + 
uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, 
"label", str), + [e], + ) + ) + else: + label = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `doc`, `type`, `inputBinding`, `label`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputRecordField'", None, _errors__ + ) + _constructed = cls( + name=name, + doc=doc, + type=type, + inputBinding=inputBinding, + label=label, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), 
+ cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "doc", "type", "inputBinding", "label"]) + + +class CommandInputRecordSchema(InputRecordSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is 
None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = 
True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "name"]) + + +class CommandInputEnumSchema(InputEnumSchema): + def __init__( + self, + symbols: Any, + type: Any, + label: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + self.label = label + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputEnumSchema): + return bool( + self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.name == other.name + and self.inputBinding == 
other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type, self.label, self.name, self.inputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + 
_doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ + ) + _constructed = cls( + symbols=symbols, + type=type, + label=label, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in 
self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if 
self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type", "label", "name", "inputBinding"]) + + +class CommandInputArraySchema(InputArraySchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.inputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + 
typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputArraySchema'", 
None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + 
key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "inputBinding"]) + + +class CommandOutputRecordField(OutputRecordField): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.doc = doc + self.type = type + self.outputBinding = outputBinding + + def 
__eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordField): + return bool( + self.name == other.name + and self.doc == other.doc + and self.type == other.type + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.doc, self.type, self.outputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid 
because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `doc`, `type`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) + _constructed = cls( + name=name, + doc=doc, + type=type, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + 
r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "doc", "type", "outputBinding"]) + + +class CommandOutputRecordSchema(OutputRecordSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + 
[e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( 
+ self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + 
relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "name"]) + + +class CommandOutputEnumSchema(OutputEnumSchema): + def __init__( + self, + symbols: Any, + type: Any, + label: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + self.label = label + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputEnumSchema): + return bool( + self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.outputBinding == 
other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type, self.label, self.outputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + 
_errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputEnumSchema'", None, _errors__ + ) + _constructed = cls( + symbols=symbols, + type=type, + label=label, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + 
cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type", "label", "outputBinding"]) + + +class CommandOutputArraySchema(OutputArraySchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + 
self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.outputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_symbolLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in 
self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "outputBinding"]) + + +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. + """ + + def __init__( + self, + id: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + format: Optional[Any] = None, + inputBinding: Optional[Any] = None, + default: Optional[Any] = None, + type: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.inputBinding = inputBinding + self.default = default + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.inputBinding == other.inputBinding + and self.default == other.default + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, 
+ self.inputBinding, + self.default, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + 
streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + if "type" in _doc: + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + else: + type = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandInputParameter'", None, _errors__) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + inputBinding=inputBinding, + default=default, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + 
r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + 
old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "inputBinding", + "default", + "type", + ] + ) + + +class CommandOutputParameter(OutputParameter): + """ + An output parameter for a CommandLineTool. + """ + + def __init__( + self, + id: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + outputBinding: Optional[Any] = None, + format: Optional[Any] = None, + type: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.outputBinding = outputBinding + self.format = format + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.outputBinding == other.outputBinding + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + 
self.outputBinding, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + 
streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "type" in _doc: + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + else: + type = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + 
else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + outputBinding=outputBinding, + format=format, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = 
self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "outputBinding", + "format", + "type", + ] + ) + + +class CommandLineTool(Process): + """ + This defines the schema of the CWL Command Line Tool Description document. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + baseCommand: Optional[Any] = None, + arguments: Optional[Any] = None, + stdin: Optional[Any] = None, + stderr: Optional[Any] = None, + stdout: Optional[Any] = None, + successCodes: Optional[Any] = None, + temporaryFailCodes: Optional[Any] = None, + permanentFailCodes: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.label = label + self.doc = doc + self.cwlVersion = cwlVersion + self.class_ = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineTool): + return bool( + self.id == other.id + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.label == other.label + and self.doc == other.doc + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + and self.baseCommand == other.baseCommand + and self.arguments == other.arguments + and self.stdin == other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == 
other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.label, + self.doc, + self.cwlVersion, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "CommandLineTool": + raise ValidationException("Not a CommandLineTool") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid 
because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = 
load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "baseCommand" in _doc: + try: + baseCommand = load_field( + _doc.get("baseCommand"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), + [e], + ) + ) + else: + baseCommand = None + if "arguments" in _doc: + try: + arguments = load_field( + _doc.get("arguments"), + union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), + [e], + ) + ) + else: + arguments = None + if "stdin" in _doc: + try: + stdin = load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), + [e], + ) + ) + else: + stdin = None + if "stderr" in _doc: + try: + stderr = load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, 
+ ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), + [e], + ) + ) + else: + stderr = None + if "stdout" in _doc: + try: + stdout = load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), + [e], + ) + ) + else: + stdout = None + if "successCodes" in _doc: + try: + successCodes = load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), + [e], + ) + ) + else: + successCodes = None + if "temporaryFailCodes" in _doc: + try: + temporaryFailCodes = load_field( + _doc.get("temporaryFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), + [e], + ) + ) + else: + temporaryFailCodes = None + if "permanentFailCodes" in _doc: + try: + permanentFailCodes = load_field( + _doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [e], + ) + ) + else: + permanentFailCodes = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = 
_doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineTool'", None, _errors__) + _constructed = cls( + id=id, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + label=label, + doc=doc, + cwlVersion=cwlVersion, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = 
get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "CommandLineTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + 
val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.baseCommand is not None and "baseCommand" not in r: + r["baseCommand"] = save( + self.baseCommand, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="baseCommand", + val=r.get("baseCommand"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.arguments is not None and "arguments" not in r: + r["arguments"] = save( + self.arguments, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="arguments", + val=r.get("arguments"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stdin is not None and "stdin" not in r: + r["stdin"] = save( + self.stdin, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdin", + val=r.get("stdin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stderr is not None and "stderr" not in r: + r["stderr"] = save( + self.stderr, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stderr", + val=r.get("stderr"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stdout is not None and "stdout" not in r: + r["stdout"] = save( + self.stdout, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdout", + val=r.get("stdout"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.successCodes is not None and "successCodes" not in r: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="successCodes", + val=r.get("successCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: + r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + 
base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="temporaryFailCodes", + val=r.get("temporaryFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.permanentFailCodes is not None and "permanentFailCodes" not in r: + r["permanentFailCodes"] = save( + self.permanentFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="permanentFailCodes", + val=r.get("permanentFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "inputs", + "outputs", + "requirements", + "hints", + "label", + "doc", + "cwlVersion", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", + ] + ) + + +class DockerRequirement(ProcessRequirement): + """ + Indicates that a workflow component should be run in a + [Docker](http://docker.com) container, and specifies how to fetch or build + the image. + + If a CommandLineTool lists `DockerRequirement` under + `hints` (or `requirements`), it may (or must) be run in the specified Docker + container. + + The platform must first acquire or install the correct Docker image as + specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. + + The platform must execute the tool in the container using `docker run` with + the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output + directory through the use of volume bind mounts. 
The platform should rewrite + file paths in the input object to correspond to the Docker bind mounted + locations. That is, the platform should rewrite values in the parameter context + such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths + within the container. + + When running a tool contained in Docker, the workflow platform must not + assume anything about the contents of the Docker container, such as the + presence or absence of specific software, except to assume that the + generated command line represents a valid command within the runtime + environment of the container. + + ## Interaction with other requirements + + If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a + DockerRequirement, the environment variables must be provided to Docker + using `--env` or `--env-file` and interact with the container's preexisting + environment as defined by Docker. + + """ + + def __init__( + self, + dockerPull: Optional[Any] = None, + dockerLoad: Optional[Any] = None, + dockerFile: Optional[Any] = None, + dockerImport: Optional[Any] = None, + dockerImageId: Optional[Any] = None, + dockerOutputDirectory: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "DockerRequirement" + self.dockerPull = dockerPull + self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DockerRequirement): + return bool( + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and 
self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + self.dockerOutputDirectory, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "DockerRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "DockerRequirement": + raise ValidationException("Not a DockerRequirement") + + if "dockerPull" in _doc: + try: + dockerPull = load_field( + _doc.get("dockerPull"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), + [e], + ) + ) + else: + dockerPull = None + if "dockerLoad" in _doc: + try: + dockerLoad = load_field( + _doc.get("dockerLoad"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), + [e], + ) + ) + else: + dockerLoad = None + if "dockerFile" in _doc: + try: + dockerFile = load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), + [e], + ) + ) + else: + dockerFile = None + if "dockerImport" in _doc: + try: + dockerImport = load_field( + _doc.get("dockerImport"), + 
union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), + [e], + ) + ) + else: + dockerImport = None + if "dockerImageId" in _doc: + try: + dockerImageId = load_field( + _doc.get("dockerImageId"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), + [e], + ) + ) + else: + dockerImageId = None + if "dockerOutputDirectory" in _doc: + try: + dockerOutputDirectory = load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), + [e], + ) + ) + else: + dockerOutputDirectory = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'DockerRequirement'", None, _errors__) + _constructed = cls( + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = 
False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "DockerRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.dockerPull is not None and "dockerPull" not in r: + r["dockerPull"] = save( + self.dockerPull, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerPull", + val=r.get("dockerPull"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerLoad is not None and "dockerLoad" not in r: + r["dockerLoad"] = 
save( + self.dockerLoad, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerLoad", + val=r.get("dockerLoad"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerFile is not None and "dockerFile" not in r: + r["dockerFile"] = save( + self.dockerFile, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerFile", + val=r.get("dockerFile"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerImport is not None and "dockerImport" not in r: + r["dockerImport"] = save( + self.dockerImport, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImport", + val=r.get("dockerImport"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerImageId is not None and "dockerImageId" not in r: + r["dockerImageId"] = save( + self.dockerImageId, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImageId", + val=r.get("dockerImageId"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: + r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerOutputDirectory", + val=r.get("dockerOutputDirectory"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas 
+ return r + + attrs = frozenset( + [ + "class", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", + ] + ) + + +class SoftwareRequirement(ProcessRequirement): + """ + A list of software packages that should be configured in the environment of + the defined process. + + """ + + def __init__( + self, + packages: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SoftwareRequirement" + self.packages = packages + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.packages)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwareRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SoftwareRequirement": + raise ValidationException("Not a SoftwareRequirement") + + try: + packages = load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + 
_errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwareRequirement'", None, _errors__) + _constructed = cls( + packages=packages, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SoftwareRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) 
+ if self.packages is not None and "packages" not in r: + r["packages"] = save( + self.packages, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="packages", + val=r.get("packages"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Optional[Any] = None, + specs: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwarePackage": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + package = load_field( + _doc.get("package"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `package` field is not valid because:", + 
SourceLine(_doc, "package", str), + [e], + ) + ) + if "version" in _doc: + try: + version = load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), + [e], + ) + ) + else: + version = None + if "specs" in _doc: + try: + specs = load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), + [e], + ) + ) + else: + specs = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwarePackage'", None, _errors__) + _constructed = cls( + package=package, + version=version, + specs=specs, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) 
+ line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.package is not None and "package" not in r: + r["package"] = save( + self.package, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="package", + val=r.get("package"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.version is not None and "version" not in r: + r["version"] = save( + self.version, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="version", + val=r.get("version"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.specs is not None and "specs" not in r: + u = save_relative_uri(self.specs, base_url, False, None, relative_uris) + r["specs"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="specs", + val=r.get("specs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level 
+ if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["package", "version", "specs"]) + + +class Dirent(Saveable): + """ + Define a file or subdirectory that must be placed in the designated output + directory prior to executing the command line tool. May be the result of + executing an expression, such as building a configuration file from a + template. + + """ + + def __init__( + self, + entry: Any, + entryname: Optional[Any] = None, + writable: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.entryname = entryname + self.entry = entry + self.writable = writable + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) + return False + + def __hash__(self) -> int: + return hash((self.entryname, self.entry, self.writable)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Dirent": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "entryname" in _doc: + try: + entryname = load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), + [e], + ) + ) + else: + 
entryname = None + try: + entry = load_field( + _doc.get("entry"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), + [e], + ) + ) + if "writable" in _doc: + try: + writable = load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), + [e], + ) + ) + else: + writable = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Dirent'", None, _errors__) + _constructed = cls( + entryname=entryname, + entry=entry, + writable=writable, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = 
get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.entryname is not None and "entryname" not in r: + r["entryname"] = save( + self.entryname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entryname", + val=r.get("entryname"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.entry is not None and "entry" not in r: + r["entry"] = save( + self.entry, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entry", + val=r.get("entry"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.writable is not None and "writable" not in r: + r["writable"] = save( + self.writable, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="writable", + val=r.get("writable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + 
r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["entryname", "entry", "writable"]) + + +class InitialWorkDirRequirement(ProcessRequirement): + """ + Define a list of files and subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool. + """ + + def __init__( + self, + listing: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InitialWorkDirRequirement" + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.listing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InitialWorkDirRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InitialWorkDirRequirement": + raise ValidationException("Not a InitialWorkDirRequirement") + + try: + listing = load_field( + _doc.get("listing"), + union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + extension_fields: 
Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) + _constructed = cls( + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InitialWorkDirRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + 
len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.listing is not None and "listing" not in r: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "listing"]) + + +class EnvVarRequirement(ProcessRequirement): + """ + Define a list of environment variables which will be set in the + execution environment of the tool. See `EnvironmentDef` for details. 
+ + """ + + def __init__( + self, + envDef: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "EnvVarRequirement" + self.envDef = envDef + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.envDef)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvVarRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "EnvVarRequirement": + raise ValidationException("Not a EnvVarRequirement") + + try: + envDef = load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvVarRequirement'", None, _errors__) + _constructed = cls( + envDef=envDef, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) 
+ return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "EnvVarRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.envDef is not None and "envDef" not in r: + r["envDef"] = save( + self.envDef, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envDef", + val=r.get("envDef"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "envDef"]) + + +class ShellCommandRequirement(ProcessRequirement): + """ + Modify the behavior of CommandLineTool to generate a single string + containing a shell command line. Each item in the argument list must be + joined into a string separated by single spaces and quoted to prevent + intepretation by the shell, unless `CommandLineBinding` for that argument + contains `shellQuote: false`. If `shellQuote: false` is specified, the + argument is joined into the command string without quoting, which allows + the use of shell metacharacters such as `|` for pipes. + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ShellCommandRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ShellCommandRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ShellCommandRequirement": + raise ValidationException("Not a ShellCommandRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ShellCommandRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + 
min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ResourceRequirement(ProcessRequirement): + """ + Specify basic hardware resource requirements. + + "min" is the minimum amount of a resource that must be reserved to schedule + a job. If "min" cannot be satisfied, the job should not be run. + + "max" is the maximum amount of a resource that the job shall be permitted + to use. If a node has sufficient resources, multiple jobs may be scheduled + on a single node provided each job's "max" resource requirements are + met. If a job attempts to exceed its "max" resource allocation, an + implementation may deny additional resources, which may result in job + failure. + + If "min" is specified but "max" is not, then "max" == "min" + If "max" is specified by "min" is not, then "min" == "max". + + It is an error if max < min. + + It is an error if the value of any of these fields is negative. + + If neither "min" nor "max" is specified for a resource, an implementation may provide a default. 
+ + """ + + def __init__( + self, + coresMin: Optional[Any] = None, + coresMax: Optional[Any] = None, + ramMin: Optional[Any] = None, + ramMax: Optional[Any] = None, + tmpdirMin: Optional[Any] = None, + tmpdirMax: Optional[Any] = None, + outdirMin: Optional[Any] = None, + outdirMax: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ResourceRequirement" + self.coresMin = coresMin + self.coresMax = coresMax + self.ramMin = ramMin + self.ramMax = ramMax + self.tmpdirMin = tmpdirMin + self.tmpdirMax = tmpdirMax + self.outdirMin = outdirMin + self.outdirMax = outdirMax + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ResourceRequirement): + return bool( + self.class_ == other.class_ + and self.coresMin == other.coresMin + and self.coresMax == other.coresMax + and self.ramMin == other.ramMin + and self.ramMax == other.ramMax + and self.tmpdirMin == other.tmpdirMin + and self.tmpdirMax == other.tmpdirMax + and self.outdirMin == other.outdirMin + and self.outdirMax == other.outdirMax + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.coresMin, + self.coresMax, + self.ramMin, + self.ramMax, + self.tmpdirMin, + self.tmpdirMax, + self.outdirMin, + self.outdirMax, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ResourceRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ResourceRequirement": + raise ValidationException("Not a ResourceRequirement") + + if "coresMin" in _doc: + 
try: + coresMin = load_field( + _doc.get("coresMin"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), + [e], + ) + ) + else: + coresMin = None + if "coresMax" in _doc: + try: + coresMax = load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [e], + ) + ) + else: + coresMax = None + if "ramMin" in _doc: + try: + ramMin = load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `ramMin` field is not valid because:", + SourceLine(_doc, "ramMin", str), + [e], + ) + ) + else: + ramMin = None + if "ramMax" in _doc: + try: + ramMax = load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), + [e], + ) + ) + else: + ramMax = None + if "tmpdirMin" in _doc: + try: + tmpdirMin = load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [e], + ) + ) + else: + tmpdirMin = None + if "tmpdirMax" in _doc: + try: + tmpdirMax = load_field( + _doc.get("tmpdirMax"), + 
union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [e], + ) + ) + else: + tmpdirMax = None + if "outdirMin" in _doc: + try: + outdirMin = load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + ) + ) + else: + outdirMin = None + if "outdirMax" in _doc: + try: + outdirMax = load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + ) + ) + else: + outdirMax = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ResourceRequirement'", None, _errors__) + _constructed = cls( + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", 
+ relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ResourceRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.coresMin is not None and "coresMin" not in r: + r["coresMin"] = save( + self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="coresMin", + val=r.get("coresMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.coresMax is not None and "coresMax" not in r: + r["coresMax"] = save( + self.coresMax, top=False, base_url=base_url, 
relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="coresMax", + val=r.get("coresMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.ramMin is not None and "ramMin" not in r: + r["ramMin"] = save( + self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="ramMin", + val=r.get("ramMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.ramMax is not None and "ramMax" not in r: + r["ramMax"] = save( + self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="ramMax", + val=r.get("ramMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.tmpdirMin is not None and "tmpdirMin" not in r: + r["tmpdirMin"] = save( + self.tmpdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="tmpdirMin", + val=r.get("tmpdirMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.tmpdirMax is not None and "tmpdirMax" not in r: + r["tmpdirMax"] = save( + self.tmpdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="tmpdirMax", + val=r.get("tmpdirMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outdirMin is not None and "outdirMin" not in r: + r["outdirMin"] = save( + self.outdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outdirMin", + val=r.get("outdirMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outdirMax is not None and "outdirMax" not in r: + r["outdirMax"] = save( + self.outdirMax, + 
top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outdirMax", + val=r.get("outdirMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "coresMin", + "coresMax", + "ramMin", + "ramMax", + "tmpdirMin", + "tmpdirMax", + "outdirMin", + "outdirMax", + ] + ) + + +class ExpressionToolOutputParameter(OutputParameter): + def __init__( + self, + id: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + outputBinding: Optional[Any] = None, + format: Optional[Any] = None, + type: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.outputBinding = outputBinding + self.format = format + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionToolOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.outputBinding == other.outputBinding + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + 
self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.outputBinding, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionToolOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid 
because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "type" in _doc: + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + else: + type = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + 
else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ExpressionToolOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + outputBinding=outputBinding, + format=format, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = 
self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "outputBinding", + "format", + "type", + ] + ) + + +class ExpressionTool(Process): + """ + Execute an expression as a Workflow step. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + expression: Any, + id: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.label = label + self.doc = doc + self.cwlVersion = cwlVersion + self.class_ = "ExpressionTool" + self.expression = expression + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionTool): + return bool( + self.id == other.id + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.label == other.label + and self.doc == other.doc + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + and self.expression == other.expression + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.label, + self.doc, + self.cwlVersion, + self.class_, + self.expression, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ExpressionTool": + raise ValidationException("Not a ExpressionTool") + + if "id" in _doc: + try: + id = load_field( + 
_doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_InputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_ExpressionToolOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + try: + expression = load_field( + _doc.get("expression"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `expression` field is not valid because:", 
+ SourceLine(_doc, "expression", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `expression`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ExpressionTool'", None, _errors__) + _constructed = cls( + id=id, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + label=label, + doc=doc, + cwlVersion=cwlVersion, + expression=expression, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + 
max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ExpressionTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.expression is not None and "expression" not in r: + r["expression"] = save( + self.expression, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expression", + 
val=r.get("expression"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "inputs", + "outputs", + "requirements", + "hints", + "label", + "doc", + "cwlVersion", + "class", + "expression", + ] + ) + + +class WorkflowOutputParameter(OutputParameter): + """ + Describe an output parameter of a workflow. The parameter must be + connected to one or more parameters defined in the workflow that will + provide the value of the output parameter. + + """ + + def __init__( + self, + id: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + outputBinding: Optional[Any] = None, + format: Optional[Any] = None, + outputSource: Optional[Any] = None, + linkMerge: Optional[Any] = None, + type: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.outputBinding = outputBinding + self.format = format + self.outputSource = outputSource + self.linkMerge = linkMerge + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.outputBinding == 
other.outputBinding + and self.format == other.format + and self.outputSource == other.outputSource + and self.linkMerge == other.linkMerge + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.outputBinding, + self.format, + self.outputSource, + self.linkMerge, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + 
secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputSource" in _doc: + try: + outputSource = load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), + [e], + ) + ) + else: + outputSource = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + 
_doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "type" in _doc: + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + else: + type = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `outputSource`, `linkMerge`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + outputBinding=outputBinding, + format=format, + outputSource=outputSource, + linkMerge=linkMerge, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: 
+ if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, 
base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and 
"format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputSource is not None and "outputSource" not in r: + u = save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) + r["outputSource"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputSource", + val=r.get("outputSource"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "outputBinding", + "format", + "outputSource", + "linkMerge", + "type", + ] + ) + + +class Sink(Saveable): + pass + + +class WorkflowStepInput(Sink): + """ + The input of a workflow step connects an upstream parameter (from the + workflow inputs, or the outputs of other workflows steps) with the input + parameters of the underlying step. 
+ + ## Input object + + A WorkflowStepInput object must contain an `id` field in the form + `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash + `/` the field name consists of the characters following the final slash + (the prefix portion may contain one or more slashes to indicate scope). + This defines a field of the workflow step input object with the value of + the `source` parameter(s). + + ## Merging + + To merge multiple inbound data links, + [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + If the sink parameter is an array, or named in a [workflow + scatter](#WorkflowStep) operation, there may be multiple inbound data links + listed in the `source` field. The values from the input links are merged + depending on the method specified in the `linkMerge` field. If not + specified, the default method is "merge_nested". + + * **merge_nested** + + The input must be an array consisting of exactly one entry for each + input link. If "merge_nested" is specified with a single link, the value + from the link must be wrapped in a single-item list. + + * **merge_flattened** + + 1. The source and sink parameters must be compatible types, or the source + type must be compatible with single element from the "items" type of + the destination array parameter. + 2. Source parameters which are arrays are concatenated. + Source parameters which are single element types are appended as + single elements. 
+ + """ + + def __init__( + self, + id: Any, + source: Optional[Any] = None, + linkMerge: Optional[Any] = None, + default: Optional[Any] = None, + valueFrom: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.source = source + self.linkMerge = linkMerge + self.id = id + self.default = default + self.valueFrom = valueFrom + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepInput): + return bool( + self.source == other.source + and self.linkMerge == other.linkMerge + and self.id == other.id + and self.default == other.default + and self.valueFrom == other.valueFrom + ) + return False + + def __hash__(self) -> int: + return hash( + (self.source, self.linkMerge, self.id, self.default, self.valueFrom) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepInput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + if "source" in _doc: + try: + source = load_field( + _doc.get("source"), + 
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [e], + ) + ) + else: + source = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `source`, `linkMerge`, `id`, `default`, `valueFrom`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__) + _constructed = cls( + source=source, + 
linkMerge=linkMerge, + id=id, + default=default, + valueFrom=valueFrom, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the 
value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.source is not None and "source" not in r: + u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) + r["source"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="source", + val=r.get("source"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["source", "linkMerge", "id", "default", "valueFrom"]) + + +class WorkflowStepOutput(Saveable): + """ + Associate an output parameter of the underlying process with a workflow + parameter. The workflow parameter (given in the `id` field) be may be used + as a `source` to connect with input parameters of other workflow steps, or + with an output parameter of the process. + + """ + + def __init__( + self, + id: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepOutput): + return bool(self.id == other.id) + return False + + def __hash__(self) -> int: + return hash((self.id)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepOutput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + extension_fields: Dict[str, 
Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__) + _constructed = cls( + id=id, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = 
base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["id"]) + + +class WorkflowStep(Saveable): + """ + A workflow step is an executable element of a workflow. It specifies the + underlying process implementation (such as `CommandLineTool` or another + `Workflow`) in the `run` field and connects the input and output parameters + of the underlying process to workflow parameters. + + # Scatter/gather + + To use scatter/gather, + [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + A "scatter" operation specifies that the associated workflow step or + subworkflow should execute separately over a list of input elements. Each + job making up a scatter operation is independent and may be executed + concurrently. 
+
+    The `scatter` field specifies one or more input parameters which will be
+    scattered. An input parameter may be listed more than once. The declared
+    type of each input parameter implicitly becomes an array of items of the
+    input parameter type. If a parameter is listed more than once, it becomes
+    a nested array. As a result, upstream parameters which are connected to
+    scattered parameters must be arrays.
+
+    All output parameter types are also implicitly wrapped in arrays. Each job
+    in the scatter results in an entry in the output array.
+
+    If any scattered parameter runtime value is an empty array, all outputs are
+    set to empty arrays and no work is done for the step, according to
+    applicable scattering rules.
+
+    If `scatter` declares more than one input parameter, `scatterMethod`
+    describes how to decompose the input into a discrete set of jobs.
+
+    * **dotproduct** specifies that each of the input arrays are aligned and one
+      element taken from each array to construct each job. It is an error
+      if all input arrays are not the same length.
+
+    * **nested_crossproduct** specifies the Cartesian product of the inputs,
+      producing a job for every combination of the scattered inputs. The
+      output must be nested arrays for each level of scattering, in the
+      order that the input arrays are listed in the `scatter` field.
+
+    * **flat_crossproduct** specifies the Cartesian product of the inputs,
+      producing a job for every combination of the scattered inputs. The
+      output arrays must be flattened to a single level, but otherwise listed in the
+      order that the input arrays are listed in the `scatter` field.
+
+    # Subworkflows
+
+    To specify a nested workflow as part of a workflow step,
+    [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be
+    specified in the workflow or workflow step requirements.
+
+    It is a fatal error if a workflow directly or indirectly invokes itself as
+    a subworkflow (recursive workflows are not allowed).
+ + """ + + def __init__( + self, + id: Any, + in_: Any, + out: Any, + run: Any, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + scatter: Optional[Any] = None, + scatterMethod: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.label = label + self.doc = doc + self.run = run + self.scatter = scatter + self.scatterMethod = scatterMethod + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStep): + return bool( + self.id == other.id + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.label == other.label + and self.doc == other.doc + and self.run == other.run + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.in_, + self.out, + self.requirements, + self.hints, + self.label, + self.doc, + self.run, + self.scatter, + self.scatterMethod, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStep": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` 
field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + raise ValidationException("Missing id") + if not __original_id_is_none: + baseuri = id + try: + in_ = load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + ) + ) + try: + out = load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", 
+ SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + run = load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [e], + ) + ) + if "scatter" in _doc: + try: + scatter = load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [e], + ) + ) + else: + scatter = None + if "scatterMethod" in _doc: + try: + scatterMethod = load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [e], + ) + ) + else: + scatterMethod = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( 
+ k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `in`, `out`, `requirements`, `hints`, `label`, `doc`, `run`, `scatter`, `scatterMethod`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStep'", None, _errors__) + _constructed = cls( + id=id, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + label=label, + doc=doc, + run=run, + scatter=scatter, + scatterMethod=scatterMethod, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, 
self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.in_ is not None and "in" not in r: + r["in"] = save( + self.in_, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="in", + val=r.get("in"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.out is not None and "out" not in r: + u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) + r["out"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="out", + val=r.get("out"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.run is not None and "run" not in r: + u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) + r["run"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="run", + val=r.get("run"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.scatter is not None and "scatter" not in r: + u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) + r["scatter"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatter", + val=r.get("scatter"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.scatterMethod is not None and "scatterMethod" not in r: + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) + r["scatterMethod"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + 
key="scatterMethod", + val=r.get("scatterMethod"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "in", + "out", + "requirements", + "hints", + "label", + "doc", + "run", + "scatter", + "scatterMethod", + ] + ) + + +class Workflow(Process): + """ + A workflow describes a set of **steps** and the **dependencies** between + those steps. When a step produces output that will be consumed by a + second step, the first step is a dependency of the second step. + + When there is a dependency, the workflow engine must execute the preceding + step and wait for it to successfully produce output before executing the + dependent step. If two steps are defined in the workflow graph that + are not directly or indirectly dependent, these steps are **independent**, + and may execute in any order or execute concurrently. A workflow is + complete when all steps have been executed. + + Dependencies between parameters are expressed using the `source` field on + [workflow step input parameters](#WorkflowStepInput) and [workflow output + parameters](#WorkflowOutputParameter). + + The `source` field expresses the dependency of one parameter on another + such that when a value is associated with the parameter specified by + `source`, that value is propagated to the destination parameter. When all + data links inbound to a given step are fufilled, the step is ready to + execute. + + ## Workflow success and failure + + A completed step must result in one of `success`, `temporaryFailure` or + `permanentFailure` states. An implementation may choose to retry a step + execution which resulted in `temporaryFailure`. 
An implementation may + choose to either continue running other steps of a workflow, or terminate + immediately upon `permanentFailure`. + + * If any step of a workflow execution results in `permanentFailure`, then + the workflow status is `permanentFailure`. + + * If one or more steps result in `temporaryFailure` and all other steps + complete `success` or are not executed, then the workflow status is + `temporaryFailure`. + + * If all workflow steps are executed and complete with `success`, then the + workflow status is `success`. + + # Extensions + + [ScatterFeatureRequirement](#ScatterFeatureRequirement) and + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are + available as standard [extensions](#Extensions_and_Metadata) to core + workflow semantics. + + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + steps: Any, + id: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.label = label + self.doc = doc + self.cwlVersion = cwlVersion + self.class_ = "Workflow" + self.steps = steps + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Workflow): + return bool( + self.id == other.id + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.label == other.label + and self.doc == other.doc + and self.cwlVersion == other.cwlVersion + 
and self.class_ == other.class_ + and self.steps == other.steps + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.label, + self.doc, + self.cwlVersion, + self.class_, + self.steps, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Workflow": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Workflow": + raise ValidationException("Not a Workflow") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_InputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_WorkflowOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + try: + steps = load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_WorkflowStepLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `steps`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Workflow'", None, _errors__) + _constructed = cls( + id=id, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + label=label, + doc=doc, + cwlVersion=cwlVersion, + steps=steps, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: 
+ if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Workflow" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, 
+ line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.steps is not None and "steps" not in r: + r["steps"] = save( + self.steps, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="steps", + val=r.get("steps"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "inputs", + "outputs", + "requirements", + "hints", + "label", + "doc", + "cwlVersion", + "class", + "steps", + ] + ) + + +class SubworkflowFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support nested workflows in + the `run` field of [WorkflowStep](#WorkflowStep). 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SubworkflowFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SubworkflowFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SubworkflowFeatureRequirement": + raise ValidationException("Not a SubworkflowFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'SubworkflowFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + 
doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SubworkflowFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ScatterFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support the `scatter` and + `scatterMethod` fields of [WorkflowStep](#WorkflowStep). 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ScatterFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ScatterFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ScatterFeatureRequirement": + raise ValidationException("Not a ScatterFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + 
elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ScatterFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links + listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "MultipleInputFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "MultipleInputFeatureRequirement": + raise ValidationException("Not a MultipleInputFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, 
CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "MultipleInputFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the `valueFrom` field + of [WorkflowStepInput](#WorkflowStepInput). 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "StepInputExpressionRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "StepInputExpressionRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "StepInputExpressionRequirement": + raise ValidationException("Not a StepInputExpressionRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'StepInputExpressionRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, 
CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "StepInputExpressionRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +_vocab = { + "Any": "https://w3id.org/cwl/salad#Any", + "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", + "CWLType": "https://w3id.org/cwl/cwl#CWLType", + "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", + "CommandInputArraySchema": "https://w3id.org/cwl/cwl#CommandInputArraySchema", + "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", + "CommandInputParameter": 
"https://w3id.org/cwl/cwl#CommandInputParameter", + "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", + "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", + "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", + "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", + "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", + "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", + "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema", + "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", + "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", + "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", + "Directory": "https://w3id.org/cwl/cwl#Directory", + "Dirent": "https://w3id.org/cwl/cwl#Dirent", + "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", + "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", + "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", + "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", + "Expression": "https://w3id.org/cwl/cwl#Expression", + "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", + "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", + "ExpressionToolOutputParameter": "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", + "File": "https://w3id.org/cwl/cwl#File", + "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", + "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", + "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", + "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", + "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", + "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", + "InputRecordField": 
"https://w3id.org/cwl/cwl#InputRecordField", + "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", + "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", + "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", + "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", + "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", + "OutputBinding": "https://w3id.org/cwl/cwl#OutputBinding", + "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", + "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", + "OutputRecordField": "https://w3id.org/cwl/cwl#OutputRecordField", + "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", + "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", + "Parameter": "https://w3id.org/cwl/cwl#Parameter", + "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", + "Process": "https://w3id.org/cwl/cwl#Process", + "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", + "RecordField": "https://w3id.org/cwl/salad#RecordField", + "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", + "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", + "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", + "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", + "SchemaBase": "https://w3id.org/cwl/cwl#SchemaBase", + "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", + "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", + "Sink": "https://w3id.org/cwl/cwl#Sink", + "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", + "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", + "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", + "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", + "Workflow": "https://w3id.org/cwl/cwl#Workflow", 
+ "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", + "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", + "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", + "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", + "array": "https://w3id.org/cwl/salad#array", + "boolean": "http://www.w3.org/2001/XMLSchema#boolean", + "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", + "double": "http://www.w3.org/2001/XMLSchema#double", + "draft-2": "https://w3id.org/cwl/cwl#draft-2", + "draft-3": "https://w3id.org/cwl/cwl#draft-3", + "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", + "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", + "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", + "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", + "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", + "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", + "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", + "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", + "enum": "https://w3id.org/cwl/salad#enum", + "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", + "float": "http://www.w3.org/2001/XMLSchema#float", + "int": "http://www.w3.org/2001/XMLSchema#int", + "long": "http://www.w3.org/2001/XMLSchema#long", + "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", + "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", + "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", + "null": "https://w3id.org/cwl/salad#null", + "record": "https://w3id.org/cwl/salad#record", + "stderr": "https://w3id.org/cwl/cwl#stderr", + "stdout": "https://w3id.org/cwl/cwl#stdout", + "string": "http://www.w3.org/2001/XMLSchema#string", + "v1.0": "https://w3id.org/cwl/cwl#v1.0", + "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", +} +_rvocab = { + "https://w3id.org/cwl/salad#Any": "Any", 
+ "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", + "https://w3id.org/cwl/cwl#CWLType": "CWLType", + "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", + "https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", + "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", + "https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", + "https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", + "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", + "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", + "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", + "https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", + "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", + "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", + "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", + "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", + "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", + "https://w3id.org/cwl/cwl#Directory": "Directory", + "https://w3id.org/cwl/cwl#Dirent": "Dirent", + "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", + "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", + "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", + "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", + "https://w3id.org/cwl/cwl#Expression": "Expression", + "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", + "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", + "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", + "https://w3id.org/cwl/cwl#File": "File", + "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", + 
"https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", + "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", + "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", + "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", + "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", + "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", + "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", + "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", + "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", + "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", + "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", + "https://w3id.org/cwl/cwl#OutputBinding": "OutputBinding", + "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", + "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", + "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", + "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", + "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", + "https://w3id.org/cwl/cwl#Parameter": "Parameter", + "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", + "https://w3id.org/cwl/cwl#Process": "Process", + "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", + "https://w3id.org/cwl/salad#RecordField": "RecordField", + "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", + "https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", + "https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", + "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", + "https://w3id.org/cwl/cwl#SchemaBase": "SchemaBase", + "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", + "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", + "https://w3id.org/cwl/cwl#Sink": "Sink", + 
"https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", + "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", + "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", + "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", + "https://w3id.org/cwl/cwl#Workflow": "Workflow", + "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", + "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", + "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", + "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", + "https://w3id.org/cwl/salad#array": "array", + "http://www.w3.org/2001/XMLSchema#boolean": "boolean", + "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", + "http://www.w3.org/2001/XMLSchema#double": "double", + "https://w3id.org/cwl/cwl#draft-2": "draft-2", + "https://w3id.org/cwl/cwl#draft-3": "draft-3", + "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", + "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", + "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", + "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", + "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", + "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", + "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", + "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", + "https://w3id.org/cwl/salad#enum": "enum", + "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", + "http://www.w3.org/2001/XMLSchema#float": "float", + "http://www.w3.org/2001/XMLSchema#int": "int", + "http://www.w3.org/2001/XMLSchema#long": "long", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", + "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", + 
"https://w3id.org/cwl/salad#null": "null", + "https://w3id.org/cwl/salad#record": "record", + "https://w3id.org/cwl/cwl#stderr": "stderr", + "https://w3id.org/cwl/cwl#stdout": "stdout", + "http://www.w3.org/2001/XMLSchema#string": "string", + "https://w3id.org/cwl/cwl#v1.0": "v1.0", + "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", +} + +strtype = _PrimitiveLoader(str) +inttype = _PrimitiveLoader(int) +floattype = _PrimitiveLoader(float) +booltype = _PrimitiveLoader(bool) +None_type = _PrimitiveLoader(type(None)) +Any_type = _AnyLoader() +PrimitiveTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + ), + "PrimitiveType", +) +AnyLoader = _EnumLoader(("Any",), "Any") +RecordFieldLoader = _RecordLoader(RecordField) +RecordSchemaLoader = _RecordLoader(RecordSchema) +EnumSchemaLoader = _RecordLoader(EnumSchema) +ArraySchemaLoader = _RecordLoader(ArraySchema) +CWLVersionLoader = _EnumLoader( + ( + "draft-2", + "draft-3.dev1", + "draft-3.dev2", + "draft-3.dev3", + "draft-3.dev4", + "draft-3.dev5", + "draft-3", + "draft-4.dev1", + "draft-4.dev2", + "draft-4.dev3", + "v1.0.dev4", + "v1.0", + ), + "CWLVersion", +) +CWLTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + "File", + "Directory", + ), + "CWLType", +) +FileLoader = _RecordLoader(File) +DirectoryLoader = _RecordLoader(Directory) +ExpressionLoader = _ExpressionLoader(str) +InputRecordFieldLoader = _RecordLoader(InputRecordField) +InputRecordSchemaLoader = _RecordLoader(InputRecordSchema) +InputEnumSchemaLoader = _RecordLoader(InputEnumSchema) +InputArraySchemaLoader = _RecordLoader(InputArraySchema) +OutputRecordFieldLoader = _RecordLoader(OutputRecordField) +OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema) +OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema) +OutputArraySchemaLoader = _RecordLoader(OutputArraySchema) +InputParameterLoader = _RecordLoader(InputParameter) 
+OutputParameterLoader = _RecordLoader(OutputParameter) +InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement) +SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement) +EnvironmentDefLoader = _RecordLoader(EnvironmentDef) +CommandLineBindingLoader = _RecordLoader(CommandLineBinding) +CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding) +CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField) +CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema) +CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema) +CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema) +CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField) +CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema) +CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema) +CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema) +CommandInputParameterLoader = _RecordLoader(CommandInputParameter) +CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) +stdoutLoader = _EnumLoader(("stdout",), "stdout") +stderrLoader = _EnumLoader(("stderr",), "stderr") +CommandLineToolLoader = _RecordLoader(CommandLineTool) +DockerRequirementLoader = _RecordLoader(DockerRequirement) +SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) +SoftwarePackageLoader = _RecordLoader(SoftwarePackage) +DirentLoader = _RecordLoader(Dirent) +InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement) +EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement) +ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement) +ResourceRequirementLoader = _RecordLoader(ResourceRequirement) +ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter) +ExpressionToolLoader = _RecordLoader(ExpressionTool) +LinkMergeMethodLoader = _EnumLoader( + ( + "merge_nested", + "merge_flattened", + ), + "LinkMergeMethod", 
+) +WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) +WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) +WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) +ScatterMethodLoader = _EnumLoader( + ( + "dotproduct", + "nested_crossproduct", + "flat_crossproduct", + ), + "ScatterMethod", +) +WorkflowStepLoader = _RecordLoader(WorkflowStep) +WorkflowLoader = _RecordLoader(Workflow) +SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) +ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement) +MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement) +StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement) +uri_strtype_True_False_None = _URILoader(strtype, True, False, None) +union_of_None_type_or_strtype = _UnionLoader( + ( + None_type, + strtype, + ) +) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + ) +) +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype +) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + ) +) 
+typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + 2, +) +array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_RecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" +) +Record_symbolLoader = _EnumLoader(("record",), "Record_symbol") +typedsl_Record_symbolLoader_2 = _TypeDSLLoader(Record_symbolLoader, 2) +array_of_strtype = _ArrayLoader(strtype) +uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) +Enum_symbolLoader = _EnumLoader(("enum",), "Enum_symbol") +typedsl_Enum_symbolLoader_2 = _TypeDSLLoader(Enum_symbolLoader, 2) +Array_symbolLoader = _EnumLoader(("array",), "Array_symbol") +typedsl_Array_symbolLoader_2 = _TypeDSLLoader(Array_symbolLoader, 2) +File_classLoader = _EnumLoader(("File",), "File_class") +uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None) +uri_union_of_None_type_or_strtype_False_False_None = _URILoader( + union_of_None_type_or_strtype, False, False, None +) +union_of_None_type_or_inttype = _UnionLoader( + ( + None_type, + inttype, + ) +) +union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + FileLoader, + DirectoryLoader, + ) +) +array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader +) +union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + 
array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +uri_union_of_None_type_or_strtype_True_False_None = _URILoader( + union_of_None_type_or_strtype, True, False, None +) +Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) +union_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + ) +) +array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader( + union_of_strtype_or_ExpressionLoader +) +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_union_of_strtype_or_ExpressionLoader, + ) +) +union_of_None_type_or_booltype = _UnionLoader( + ( + None_type, + booltype, + ) +) +union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ) +) +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + ) +) 
+typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + 2, +) +union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + CommandLineBindingLoader, + ) +) +array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) +union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_InputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" +) +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + 
array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + 2, +) +union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( + ( + None_type, + CommandOutputBindingLoader, + ) +) +array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) +union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_OutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" +) +union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ExpressionLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, +) +union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( + ( + None_type, + FileLoader, + DirectoryLoader, + Any_type, + ) +) +union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + None_type, + CWLTypeLoader, + InputRecordSchemaLoader, + 
InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + 2, +) +union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + ) +) +uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None +) +array_of_InputParameterLoader = _ArrayLoader(InputParameterLoader) +idmap_inputs_array_of_InputParameterLoader = _IdMapLoader( + array_of_InputParameterLoader, "id", "type" +) +array_of_OutputParameterLoader = _ArrayLoader(OutputParameterLoader) +idmap_outputs_array_of_OutputParameterLoader = _IdMapLoader( + array_of_OutputParameterLoader, "id", "type" +) +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + 
ShellCommandRequirementLoader, + ResourceRequirementLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + ) +) +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader +) +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + None_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + ) +) 
+idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + "class", + "None", +) +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + ShellCommandRequirementLoader, + ResourceRequirementLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + Any_type, + ) +) 
+array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type +) +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + None_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + ) +) 
+idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + "class", + "None", +) +union_of_None_type_or_CWLVersionLoader = _UnionLoader( + ( + None_type, + CWLVersionLoader, + ) +) +uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader( + union_of_None_type_or_CWLVersionLoader, False, True, None +) +InlineJavascriptRequirement_classLoader = _EnumLoader( + ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" +) +uri_InlineJavascriptRequirement_classLoader_False_True_None = _URILoader( + InlineJavascriptRequirement_classLoader, False, True, None +) +union_of_None_type_or_array_of_strtype = _UnionLoader( + ( + None_type, + array_of_strtype, + ) +) +SchemaDefRequirement_classLoader = _EnumLoader( + ("SchemaDefRequirement",), "SchemaDefRequirement_class" +) +uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( + SchemaDefRequirement_classLoader, False, True, None +) +union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( + _UnionLoader( + ( + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + ) + ) +) 
+array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _ArrayLoader( + union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader +) +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_strtype, + ) +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + 
union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) +union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandInputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) 
+typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandOutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) +) +union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + None_type, + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + 
union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + None_type, + CWLTypeLoader, + stdoutLoader, + stderrLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") +uri_CommandLineTool_classLoader_False_True_None = _URILoader( + CommandLineTool_classLoader, False, True, None +) +array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) 
+idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( + array_of_CommandInputParameterLoader, "id", "type" +) +array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) +idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( + array_of_CommandOutputParameterLoader, "id", "type" +) +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + CommandLineBindingLoader, + ) +) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) +) +array_of_inttype = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype = _UnionLoader( + ( + None_type, + array_of_inttype, + ) +) +DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) +uri_DockerRequirement_classLoader_False_True_None = _URILoader( + DockerRequirement_classLoader, False, True, None +) +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) +uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( + SoftwareRequirement_classLoader, False, True, None +) +array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( + array_of_SoftwarePackageLoader, "package", "specs" +) +uri_union_of_None_type_or_array_of_strtype_False_False_None = _URILoader( + union_of_None_type_or_array_of_strtype, False, False, None +) +InitialWorkDirRequirement_classLoader = _EnumLoader( + ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" +) +uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( + 
InitialWorkDirRequirement_classLoader, False, True, None +) +union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + FileLoader, + DirectoryLoader, + DirentLoader, + strtype, + ExpressionLoader, + ) +) +array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader +) +union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader, + strtype, + ExpressionLoader, + ) +) +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) +uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( + EnvVarRequirement_classLoader, False, True, None +) +array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( + array_of_EnvironmentDefLoader, "envName", "envValue" +) +ShellCommandRequirement_classLoader = _EnumLoader( + ("ShellCommandRequirement",), "ShellCommandRequirement_class" +) +uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( + ShellCommandRequirement_classLoader, False, True, None +) +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) +uri_ResourceRequirement_classLoader_False_True_None = _URILoader( + ResourceRequirement_classLoader, False, True, None +) +union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + strtype, + ExpressionLoader, + ) +) 
+union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + None_type, + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + 2, +) +ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") +uri_ExpressionTool_classLoader_False_True_None = _URILoader( + ExpressionTool_classLoader, False, True, None +) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) +idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( + array_of_ExpressionToolOutputParameterLoader, "id", "type" +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1 +) +union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( + ( + None_type, + LinkMergeMethodLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2 +) +array_of_WorkflowStepInputLoader = 
_ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( + array_of_WorkflowStepInputLoader, "id", "source" +) +union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + ( + strtype, + WorkflowStepOutputLoader, + ) +) +array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( + union_of_strtype_or_WorkflowStepOutputLoader +) +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) +) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) +) +array_of_Any_type = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type = _UnionLoader( + ( + None_type, + array_of_Any_type, + ) +) +idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_Any_type, "class", "None" +) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) + ) +) +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, + False, + False, + None, +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0 +) +union_of_None_type_or_ScatterMethodLoader = _UnionLoader( + ( + None_type, + ScatterMethodLoader, + ) +) +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader( + union_of_None_type_or_ScatterMethodLoader, False, True, None +) +Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, 
True, None +) +array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) +idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( + array_of_WorkflowOutputParameterLoader, "id", "type" +) +array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) +idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( + union_of_array_of_WorkflowStepLoader, "id", "None" +) +SubworkflowFeatureRequirement_classLoader = _EnumLoader( + ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" +) +uri_SubworkflowFeatureRequirement_classLoader_False_True_None = _URILoader( + SubworkflowFeatureRequirement_classLoader, False, True, None +) +ScatterFeatureRequirement_classLoader = _EnumLoader( + ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" +) +uri_ScatterFeatureRequirement_classLoader_False_True_None = _URILoader( + ScatterFeatureRequirement_classLoader, False, True, None +) +MultipleInputFeatureRequirement_classLoader = _EnumLoader( + ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" +) +uri_MultipleInputFeatureRequirement_classLoader_False_True_None = _URILoader( + MultipleInputFeatureRequirement_classLoader, False, True, None +) +StepInputExpressionRequirement_classLoader = _EnumLoader( + ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" +) +uri_StepInputExpressionRequirement_classLoader_False_True_None = _URILoader( + StepInputExpressionRequirement_classLoader, False, True, None +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) +) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader + ) +) 
# Union loader accepting a single process object (CommandLineTool, ExpressionTool,
# or Workflow) or an array of such objects (a "$graph"-style document root).
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader(
    (
        CommandLineToolLoader,
        ExpressionToolLoader,
        WorkflowLoader,
        array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
    )
)


def load_document(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """Load a CWL document from an already-parsed object.

    `doc` is the result of a YAML/JSON parse.  Defaults `baseuri` to the
    current working directory as a file URI.  Returns only the loaded root
    object(s); the document metadata produced by `_document_load` is discarded
    (use `load_document_with_metadata` to keep it).
    """
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        # NOTE(review): unlike the other entry points below, no fileuri is set
        # on the default LoadingOptions here — confirm this asymmetry is intended.
        loadingOptions = LoadingOptions()
    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        doc,
        baseuri,
        loadingOptions,
    )
    return result


def load_document_with_metadata(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
    addl_metadata_fields: Optional[MutableSequence[str]] = None,
) -> Any:
    """Load a CWL document and keep its metadata.

    Same as `load_document` but returns the full `(result, metadata)` value of
    `_document_load`, and forwards `addl_metadata_fields` so extra top-level
    fields can be captured as metadata.
    """
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=baseuri)
    return _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        doc,
        baseuri,
        loadingOptions,
        addl_metadata_fields=addl_metadata_fields,
    )


def load_document_by_string(
    string: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """Load a CWL document from YAML/JSON text.

    Parses `string` with the round-trip-preserving loader (`yaml_no_ts`) and
    tags every node with `uri` via `add_lc_filename` so line/column
    information points at the right file.  Returns only the loaded root
    object(s); metadata is discarded.
    """
    yaml = yaml_no_ts()
    result = yaml.load(string)
    add_lc_filename(result, uri)

    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)

    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        result,
        uri,
        loadingOptions,
    )
    return result


def load_document_by_yaml(
    yaml: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """
    Shortcut to load via a YAML object.
    yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True
    """
    # Tag nodes with the source uri so SourceLine error reporting works.
    add_lc_filename(yaml, uri)

    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)

    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        yaml,
        uri,
        loadingOptions,
    )
    return result
diff --git a/schema_salad/tests/cwl_v1_1.py b/schema_salad/tests/cwl_v1_1.py
new file mode 100644
index 000000000..133ccfb02
--- /dev/null
+++ b/schema_salad/tests/cwl_v1_1.py
@@ -0,0 +1,22729 @@
#
# This file was autogenerated using schema-salad-tool --codegen=python
# The code itself is released under the Apache 2.0 license and the help text is
# subject to the license of the original schema.
+import copy +import logging +import os +import pathlib +import re +import tempfile +import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 +import xml.sax # nosec +from abc import ABC, abstractmethod +from io import StringIO +from typing import ( + Any, + Dict, + List, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit +from urllib.request import pathname2url + +from rdflib import Graph +from rdflib.plugins.parsers.notation3 import BadSyntax +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +from schema_salad.exceptions import SchemaSaladException, ValidationException +from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.sourceline import SourceLine, add_lc_filename +from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ + +_vocab: Dict[str, str] = {} +_rvocab: Dict[str, str] = {} + +_logger = logging.getLogger("salad") + + +IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] + + +doc_line_info = CommentedMap() +inserted_line_info: Dict[int, int] = {} + + +class LoadingOptions: + idx: IdxType + fileuri: Optional[str] + baseuri: str + namespaces: MutableMapping[str, str] + schemas: MutableSequence[str] + original_doc: Optional[Any] + addl_metadata: MutableMapping[str, Any] + fetcher: Fetcher + vocab: Dict[str, str] + rvocab: Dict[str, str] + cache: CacheType + imports: List[str] + includes: List[str] + + def __init__( + self, + fetcher: Optional[Fetcher] = None, + namespaces: Optional[Dict[str, str]] = None, + schemas: Optional[List[str]] = None, + fileuri: Optional[str] = None, + copyfrom: Optional["LoadingOptions"] = None, + original_doc: Optional[Any] = None, + addl_metadata: Optional[Dict[str, str]] = None, + baseuri: Optional[str] = None, + idx: Optional[IdxType] = None, + imports: Optional[List[str]] = None, + includes: 
Optional[List[str]] = None, + ) -> None: + """Create a LoadingOptions object.""" + self.original_doc = original_doc + + if idx is not None: + self.idx = idx + else: + self.idx = copyfrom.idx if copyfrom is not None else {} + + if fileuri is not None: + self.fileuri = fileuri + else: + self.fileuri = copyfrom.fileuri if copyfrom is not None else None + + if baseuri is not None: + self.baseuri = baseuri + else: + self.baseuri = copyfrom.baseuri if copyfrom is not None else "" + + if namespaces is not None: + self.namespaces = namespaces + else: + self.namespaces = copyfrom.namespaces if copyfrom is not None else {} + + if schemas is not None: + self.schemas = schemas + else: + self.schemas = copyfrom.schemas if copyfrom is not None else [] + + if addl_metadata is not None: + self.addl_metadata = addl_metadata + else: + self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} + + if imports is not None: + self.imports = imports + else: + self.imports = copyfrom.imports if copyfrom is not None else [] + + if includes is not None: + self.includes = includes + else: + self.includes = copyfrom.includes if copyfrom is not None else [] + + if fetcher is not None: + self.fetcher = fetcher + elif copyfrom is not None: + self.fetcher = copyfrom.fetcher + else: + import requests + from cachecontrol.caches import FileCache + from cachecontrol.wrapper import CacheControl + + root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) + session = CacheControl( + requests.Session(), + cache=FileCache(root / ".cache" / "salad"), + ) + self.fetcher: Fetcher = DefaultFetcher({}, session) + + self.cache = ( + self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} + ) + + self.vocab = _vocab + self.rvocab = _rvocab + + if namespaces is not None: + self.vocab = self.vocab.copy() + self.rvocab = self.rvocab.copy() + for k, v in namespaces.items(): + self.vocab[k] = v + self.rvocab[v] = k + + @property + def graph(self) -> Graph: + 
"""Generate a merged rdflib.Graph from all entries in self.schemas.""" + graph = Graph() + if not self.schemas: + return graph + key = str(hash(tuple(self.schemas))) + if key in self.cache: + return cast(Graph, self.cache[key]) + for schema in self.schemas: + fetchurl = ( + self.fetcher.urljoin(self.fileuri, schema) + if self.fileuri is not None + else pathlib.Path(schema).resolve().as_uri() + ) + if fetchurl not in self.cache or self.cache[fetchurl] is True: + _logger.debug("Getting external schema %s", fetchurl) + try: + content = self.fetcher.fetch_text(fetchurl) + except Exception as e: + _logger.warning( + "Could not load extension schema %s: %s", fetchurl, str(e) + ) + continue + newGraph = Graph() + err_msg = "unknown error" + for fmt in ["xml", "turtle"]: + try: + newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) + self.cache[fetchurl] = newGraph + graph += newGraph + break + except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: + err_msg = str(e) + else: + _logger.warning( + "Could not load extension schema %s: %s", fetchurl, err_msg + ) + self.cache[key] = graph + return graph + + +class Saveable(ABC): + """Mark classes than have a save() and fromDoc() function.""" + + @classmethod + @abstractmethod + def fromDoc( + cls, + _doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Saveable": + """Construct this object from the result of yaml.load().""" + + @abstractmethod + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + ) -> CommentedMap: + """Convert this object to a JSON/YAML friendly dictionary.""" + + +def load_field(val, fieldtype, baseuri, loadingOptions): + # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = 
loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) + result, metadata = _document_load_by_url( + fieldtype, + url, + loadingOptions, + ) + loadingOptions.imports.append(url) + return result + elif "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = loadingOptions.fetcher.urljoin( + loadingOptions.fileuri, val["$include"] + ) + val = loadingOptions.fetcher.fetch_text(url) + loadingOptions.includes.append(url) + return fieldtype.load(val, baseuri, loadingOptions) + + +save_type = Optional[ + Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] +] + + +def add_kv( + old_doc: CommentedMap, + new_doc: CommentedMap, + line_numbers: Dict[Any, Dict[str, int]], + key: str, + val: Any, + max_len: int, + cols: Dict[int, int], + min_col: int = 0, +) -> int: + """Add key value pair into Commented Map. + + Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers + for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, + and max col value taken for each line. 
+ """ + if len(inserted_line_info.keys()) >= 1: + max_line = max(inserted_line_info.keys()) + 1 + else: + max_line = 0 + if ( + key in line_numbers + ): # If the key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] + if line_info[0] not in inserted_line_info: + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) + inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] + else: + line = line_info[0] + while line in inserted_line_info.keys(): + line += 1 + new_doc.lc.add_kv_line_col( + key, + [ + line, + old_doc.lc.data[key][1], + line + (line - old_doc.lc.data[key][2]), + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[line] = old_doc.lc.data[key][1] + return max_len + elif isinstance(val, (int, float, str)) and not isinstance( + val, bool + ): # If the value is hashable + if val in line_numbers: # If the value is in the original CommentedMap + line = line_numbers[val]["line"] + if line in inserted_line_info: + line = max_line + if line in cols: + col = max(line_numbers[val]["col"], cols[line]) + else: + col = line_numbers[val]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len("id") + 2 + return max_len + elif isinstance(val, str): + if val + "?" 
in line_numbers: + line = line_numbers[val + "?"]["line"] + if line in inserted_line_info: + line = max_line + if line in cols: + col = max(line_numbers[val + "?"]["col"], cols[line]) + else: + col = line_numbers[val + "?"]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len("id") + 2 + return max_len + elif old_doc: + if val in old_doc: + index = old_doc.lc.data.index(val) + line_info = old_doc.lc.data[index] + if line_info[0] not in inserted_line_info: + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) + inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ + index + ][1] + else: + new_doc.lc.add_kv_line_col( + key, + [ + max_line, + old_doc.lc.data[index][1], + max_line + (max_line - old_doc.lc.data[index][2]), + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[max_line] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap (or value is not hashable) + new_doc.lc.add_kv_line_col( + key, [max_line, min_col, max_line, min_col + len(key) + 2] + ) + inserted_line_info[max_line] = min_col + len(key) + 2 + return max_len + 1 + + +def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: + """Get line numbers for kv pairs in CommentedMap. + + For each key/value pair in a CommentedMap, save the line/col info into a dictionary, + only save value info if value is hashable. 
+ """ + line_numbers: Dict[Any, Dict[str, int]] = {} + if doc is None: + return {} + if doc.lc.data is None: + return {} + for key, value in doc.lc.data.items(): + line_numbers[key] = {} + + line_numbers[key]["line"] = doc.lc.data[key][0] + line_numbers[key]["col"] = doc.lc.data[key][1] + if isinstance(value, (int, float, bool, str)): + line_numbers[value] = {} + line_numbers[value]["line"] = doc.lc.data[key][2] + line_numbers[value]["col"] = doc.lc.data[key][3] + return line_numbers + + +def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + min_col = 0 + for line in line_numbers: + if line_numbers[line]["col"] > min_col: + min_col = line_numbers[line]["col"] + return min_col + + +def get_max_line_num(doc: CommentedMap) -> int: + """Get the max line number for a CommentedMap. + + Iterate through the the key with the highest line number until you reach a non-CommentedMap value + or empty CommentedMap. + """ + max_line = 0 + max_key = "" + cur = doc + while isinstance(cur, CommentedMap) and len(cur) > 0: + for key in cur.lc.data.keys(): + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line + 1 + + +def save( + val: Any, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, +) -> save_type: + """Save a val of any type. + + Recursively calls save method from class if val is of type Saveable. + Otherwise, saves val to CommentedMap or CommentedSeq. 
+ """ + if keys is None: + keys = [] + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if isinstance(val, Saveable): + return val.save( + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys + ) + if isinstance(val, MutableSequence): + r = CommentedSeq() + r.lc.data = {} + for i in range(0, len(val)): + new_keys = keys + if doc: + if str(i) in doc: + r.lc.data[i] = doc.lc.data[i] + new_keys.append(i) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + ) + ) + return r + # return [ + # save(v, top=False, base_url=base_url, relative_uris=relative_uris) + # for v in val + # ] + if isinstance(val, MutableMapping): + newdict = CommentedMap() + new_keys = keys + for key in val: + if doc: + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + new_keys.append(key) + + newdict[key] = save( + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + ) + return newdict + # newdict = {} + # for key in val: + # newdict[key] = save( + # val[key], top=False, base_url=base_url, relative_uris=relative_uris + # ) + # return newdict + if val is None or isinstance(val, (int, float, bool, str)): + return val + raise Exception("Not Saveable: %s" % type(val)) + + +def save_with_metadata( + val: Any, + valLoadingOpts: LoadingOptions, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, +) -> save_type: + """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" + saved_val = save(val, top, base_url, relative_uris) + newdict: MutableMapping[str, Any] = {} + if isinstance(saved_val, MutableSequence): + newdict = {"$graph": saved_val} + elif isinstance(saved_val, MutableMapping): + newdict = saved_val + 
+ if valLoadingOpts.namespaces: + newdict["$namespaces"] = valLoadingOpts.namespaces + if valLoadingOpts.schemas: + newdict["$schemas"] = valLoadingOpts.schemas + if valLoadingOpts.baseuri: + newdict["$base"] = valLoadingOpts.baseuri + for k, v in valLoadingOpts.addl_metadata.items(): + if k not in newdict: + newdict[k] = v + + return newdict + + +def expand_url( + url, # type: str + base_url, # type: str + loadingOptions, # type: LoadingOptions + scoped_id=False, # type: bool + vocab_term=False, # type: bool + scoped_ref=None, # type: Optional[int] +): + # type: (...) -> str + if url in ("@id", "@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and ":" in url: + prefix = url.split(":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urlsplit(url) + + if ( + ( + bool(split.scheme) + and split.scheme in loadingOptions.fetcher.supported_schemes() + ) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urlsplit(base_url) + frg = "" + if bool(splitbase.fragment): + frg = splitbase.fragment + "/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urlsplit(base_url) + sp = splitbase.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException(f"Term '{url}' not in 
vocabulary") + + return url + + +class _Loader: + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + pass + + +class _AnyLoader(_Loader): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp): + # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None + self.tp = tp + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, self.tp): + raise ValidationException( + "Expected a {} but got {}".format( + self.tp.__class__.__name__, doc.__class__.__name__ + ) + ) + return doc + + def __repr__(self): # type: () -> str + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items): + # type: (_Loader) -> None + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableSequence): + raise ValidationException(f"Expected a list, was {type(doc)}") + r = [] # type: List[Any] + errors = [] # type: List[SchemaSaladException] + for i in range(0, len(doc)): + try: + lf = load_field( + doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions + ) + if isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + except ValidationException as e: + errors.append(e.with_sourceline(SourceLine(doc, i, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self): # type: () -> str + return f"array<{self.items}>" + + +class _EnumLoader(_Loader): + def __init__(self, symbols: Sequence[str], name: str) -> None: + self.symbols = symbols + self.name = name + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, 
str, LoadingOptions, Optional[str]) -> Any + if doc in self.symbols: + return doc + else: + raise ValidationException(f"Expected one of {self.symbols}") + + def __repr__(self): # type: () -> str + return self.name + + +class _SecondaryDSLLoader(_Loader): + def __init__(self, inner): + # type: (_Loader) -> None + self.inner = inner + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + r: List[Dict[str, Any]] = [] + if isinstance(doc, MutableSequence): + for d in doc: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict: Dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + "Missing pattern in secondaryFiles specification entry: {}".format( + d + ) + ) + new_dict["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None + ) + + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) + ) + r.append(new_dict) + + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." 
+ ) + elif isinstance(doc, MutableMapping): + new_dict = {} + doc_copy = copy.deepcopy(doc) + if "pattern" in doc_copy: + new_dict["pattern"] = doc_copy.pop("pattern") + else: + raise ValidationException( + "Missing pattern in secondaryFiles specification entry: {}".format( + doc + ) + ) + new_dict["required"] = ( + doc_copy.pop("required") if "required" in doc_copy else None + ) + + if len(doc_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + doc_copy + ) + ) + r.append(new_dict) + + elif isinstance(doc, str): + if doc.endswith("?"): + r.append({"pattern": doc[:-1], "required": False}) + else: + r.append({"pattern": doc}) + else: + raise ValidationException("Expected str or sequence of str") + return self.inner.load(r, baseuri, loadingOptions, docRoot) + + +class _RecordLoader(_Loader): + def __init__(self, classtype): + # type: (Type[Saveable]) -> None + self.classtype = classtype + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableMapping): + raise ValidationException(f"Expected a dict, was {type(doc)}") + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self): # type: () -> str + return str(self.classtype.__name__) + + +class _ExpressionLoader(_Loader): + def __init__(self, items: Type[str]) -> None: + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, str): + raise ValidationException(f"Expected a str, was {type(doc)}") + return doc + + +class _UnionLoader(_Loader): + def __init__(self, alternates: Sequence[_Loader]) -> None: + self.alternates = alternates + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + errors = [] + for t in self.alternates: + try: + return 
t.load(doc, baseuri, loadingOptions, docRoot=docRoot) + except ValidationException as e: + errors.append(ValidationException(f"tried {t} but", None, [e])) + raise ValidationException("", None, errors, "-") + + def __repr__(self): # type: () -> str + return " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__(self, inner, scoped_id, vocab_term, scoped_ref): + # type: (_Loader, bool, bool, Union[int, None]) -> None + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = scoped_ref + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + newdoc = [] + for i in doc: + if isinstance(i, str): + newdoc.append( + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + elif isinstance(doc, str): + doc = expand_url( + doc, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + return self.inner.load(doc, baseuri, loadingOptions) + + +class _TypeDSLLoader(_Loader): + typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") + + def __init__(self, inner, refScope): + # type: (_Loader, Union[int, None]) -> None + self.inner = inner + self.refScope = refScope + + def resolve( + self, + doc, # type: str + baseuri, # type: str + loadingOptions, # type: LoadingOptions + ): + # type: (...) 
-> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] + m = self.typeDSLregex.match(doc) + if m: + group1 = m.group(1) + assert group1 is not None # nosec + first = expand_url( + group1, baseuri, loadingOptions, False, True, self.refScope + ) + second = third = None + if bool(m.group(2)): + second = {"type": "array", "items": first} + # second = CommentedMap((("type", "array"), + # ("items", first))) + # second.lc.add_kv_line_col("type", lc) + # second.lc.add_kv_line_col("items", lc) + # second.lc.filename = filename + if bool(m.group(3)): + third = ["null", second or first] + # third = CommentedSeq(["null", second or first]) + # third.lc.add_kv_line_col(0, lc) + # third.lc.add_kv_line_col(1, lc) + # third.lc.filename = filename + return third or second or first + return doc + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + r = [] # type: List[Any] + for d in doc: + if isinstance(d, str): + resolved = self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, str): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions) + + +class _IdMapLoader(_Loader): + def __init__(self, inner, mapSubject, mapPredicate): + # type: (_Loader, str, Union[str, None]) -> None + self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableMapping): + r = [] # type: List[Any] + for k in sorted(doc.keys()): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = 
k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions) + + +def _document_load( + loader: _Loader, + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], + baseuri: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Tuple[Any, LoadingOptions]: + if isinstance(doc, str): + return _document_load_by_url( + loader, + loadingOptions.fetcher.urljoin(baseuri, doc), + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + if isinstance(doc, MutableMapping): + addl_metadata = {} + if addl_metadata_fields is not None: + for mf in addl_metadata_fields: + if mf in doc: + addl_metadata[mf] = doc[mf] + + docuri = baseuri + if "$base" in doc: + baseuri = doc["$base"] + + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, + namespaces=doc.get("$namespaces", None), + schemas=doc.get("$schemas", None), + baseuri=doc.get("$base", None), + addl_metadata=addl_metadata, + ) + + # doc = { + # k: v + # for k, v in doc.items() + # if k not in ("$namespaces", "$schemas", "$base") + # } + doc = copy.copy(doc) + if "$namespaces" in doc: + doc.pop("$namespaces") + if "$schemas" in doc: + doc.pop("$schemas") + if "$base" in doc: + doc.pop("$base") + + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc + + if "$graph" in doc: + loadingOptions.idx[baseuri] = ( + loader.load(doc["$graph"], baseuri, loadingOptions), + loadingOptions, + ) + else: + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), + loadingOptions, + ) + + if docuri != baseuri: + loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] + + return loadingOptions.idx[baseuri] 
+ if isinstance(doc, MutableSequence): + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions), + loadingOptions, + ) + return loadingOptions.idx[baseuri] + + raise ValidationException( + "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) + ) + + +def _document_load_by_url( + loader: _Loader, + url: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Tuple[Any, LoadingOptions]: + if url in loadingOptions.idx: + return loadingOptions.idx[url] + + doc_url, frg = urldefrag(url) + + text = loadingOptions.fetcher.fetch_text(doc_url) + if isinstance(text, bytes): + textIO = StringIO(text.decode("utf-8")) + else: + textIO = StringIO(text) + textIO.name = str(doc_url) + yaml = yaml_no_ts() + result = yaml.load(textIO) + add_lc_filename(result, doc_url) + + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) + + _document_load( + loader, + result, + doc_url, + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + return loadingOptions.idx[url] + + +def file_uri(path, split_frag=False): # type: (str, bool) -> str + if path.startswith("file://"): + return path + if split_frag: + pathsp = path.split("#", 2) + frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" + urlpath = pathname2url(str(pathsp[0])) + else: + urlpath = pathname2url(path) + frag = "" + if urlpath.startswith("//"): + return f"file:{urlpath}{frag}" + else: + return f"file://{urlpath}{frag}" + + +def prefix_url(url: str, namespaces: Dict[str, str]) -> str: + """Expand short forms into full URLs using the given namespace dictionary.""" + for k, v in namespaces.items(): + if url.startswith(v): + return k + ":" + url[len(v) :] + return url + + +def save_relative_uri( + uri: Any, + base_url: str, + scoped_id: bool, + ref_scope: Optional[int], + relative_uris: bool, +) -> Any: + """Convert any URI to a relative one, obeying the scoping rules.""" + if isinstance(uri, 
MutableSequence): + return [ + save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) + for u in uri + ] + elif isinstance(uri, str): + if not relative_uris or uri == base_url: + return uri + urisplit = urlsplit(uri) + basesplit = urlsplit(base_url) + if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: + if urisplit.path != basesplit.path: + p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) + if urisplit.fragment: + p = p + "#" + urisplit.fragment + return p + + basefrag = basesplit.fragment + "/" + if ref_scope: + sp = basefrag.split("/") + i = 0 + while i < ref_scope: + sp.pop() + i += 1 + basefrag = "/".join(sp) + + if urisplit.fragment.startswith(basefrag): + return urisplit.fragment[len(basefrag) :] + else: + return urisplit.fragment + return uri + else: + return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) + + +def shortname(inputid: str) -> str: + """ + Compute the shortname of a fully qualified identifier. + + See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. + """ + parsed_id = urlparse(inputid) + if parsed_id.fragment: + return parsed_id.fragment.split("/")[-1] + return parsed_id.path.split("/")[-1] + + +def parser_info() -> str: + return "org.w3id.cwl.v1_1" + + +class Documented(Saveable): + pass + + +class RecordField(Documented): + """ + A field of a record. 
+ """ + + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash((self.doc, self.name, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + 
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in 
self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["doc", "name", "type"]) + + +class RecordSchema(Saveable): + def __init__( + self, + type: Any, + 
fields: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordSchema): + return bool(self.fields == other.fields and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), + SourceLine(_doc, k, 
str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + 
old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type"]) + + +class EnumSchema(Saveable): + """ + Define an enumerated type. + + """ + + def __init__( + self, + symbols: Any, + type: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnumSchema): + return bool(self.symbols == other.symbols and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` 
field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnumSchema'", None, _errors__) + _constructed = cls( + symbols=symbols, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if 
isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type"]) + + +class ArraySchema(Saveable): + def __init__( + self, + items: Any, + type: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + + def __eq__(self, other: Any) 
-> bool: + if isinstance(other, ArraySchema): + return bool(self.items == other.items and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = 
True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
created on disk with `contents` when needed for executing a tool. Where appropriate, expressions can return file literals to define new files at runtime.
The `checksum` field contains a cryptographic hash of the file content for use in verifying file contents.
It is legal to return a file + object with an existing `location` but a different `basename`. The + `loadContents` field of ExpressionTool inputs behaves the same as on + CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + dirname: Optional[Any] = None, + nameroot: Optional[Any] = None, + nameext: Optional[Any] = None, + checksum: Optional[Any] = None, + size: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + format: Optional[Any] = None, + contents: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, File): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum == other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents + ) + return False + + def __hash__(self) -> int: + return hash( + ( + 
self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, + self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "File": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "File": + raise ValidationException("Not a File") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "dirname" in _doc: + try: + dirname = load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + ) + ) + else: + dirname = None + if "nameroot" in 
_doc: + try: + nameroot = load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], + ) + ) + else: + nameroot = None + if "nameext" in _doc: + try: + nameext = load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + ) + ) + else: + nameext = None + if "checksum" in _doc: + try: + checksum = load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + ) + ) + else: + checksum = None + if "size" in _doc: + try: + size = load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [e], + ) + ) + else: + size = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "contents" in _doc: + try: + contents = load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [e], + ) + ) + else: + contents = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'File'", None, _errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, 
doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "File" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + 
if self.dirname is not None and "dirname" not in r: + r["dirname"] = save( + self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dirname", + val=r.get("dirname"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.nameroot is not None and "nameroot" not in r: + r["nameroot"] = save( + self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameroot", + val=r.get("nameroot"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.nameext is not None and "nameext" not in r: + r["nameext"] = save( + self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameext", + val=r.get("nameext"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.checksum is not None and "checksum" not in r: + r["checksum"] = save( + self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="checksum", + val=r.get("checksum"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.size is not None and "size" not in r: + r["size"] = save( + self.size, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="size", + val=r.get("size"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.contents is not None and "contents" not in r: + r["contents"] = save( + self.contents, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="contents", + val=r.get("contents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with `class` of `Directory`. Directory objects have + a number of properties that provide metadata about the directory. + + The `location` property of a Directory is a URI that uniquely identifies + the directory. Implementations must support the file:// URI scheme and may + support other schemes such as http://. Alternately to `location`, + implementations must also accept the `path` property on Directory, which + must be a filesystem path available on the same host as the CWL runner (for + inputs) or the runtime environment of a command line tool execution (for + command line tool outputs). + + A Directory object may have a `listing` field. This is a list of File and + Directory objects that are contained in the Directory. 
When executing a CommandLineTool, Directories must be recursively staged first and have local values of `path` assigned.
    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "Directory":
        """Construct a Directory from a parsed YAML mapping.

        Validates the `class` discriminator, loads each known field through
        its generated loader, collects per-field errors, and raises one
        aggregate ValidationException if anything failed.
        """
        _doc = copy.copy(doc)
        # copy.copy is shallow; re-point the line/column bookkeeping so the
        # copy reports the same source positions as the original node.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        # Discriminator check: this loader only accepts Directory documents.
        if _doc.get("class") != "Directory":
            raise ValidationException("Not a Directory")

        # Each optional field: load if present, else None; loader failures
        # are recorded (with source position) rather than raised immediately
        # so all field errors can be reported together below.
        if "location" in _doc:
            try:
                location = load_field(
                    _doc.get("location"),
                    uri_union_of_None_type_or_strtype_False_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `location` field is not valid because:",
                        SourceLine(_doc, "location", str),
                        [e],
                    )
                )
        else:
            location = None
        if "path" in _doc:
            try:
                path = load_field(
                    _doc.get("path"),
                    uri_union_of_None_type_or_strtype_False_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `path` field is not valid because:",
                        SourceLine(_doc, "path", str),
                        [e],
                    )
                )
        else:
            path = None
        if "basename" in _doc:
            try:
                basename = load_field(
                    _doc.get("basename"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `basename` field is not valid because:",
                        SourceLine(_doc, "basename", str),
                        [e],
                    )
                )
        else:
            basename = None
        if "listing" in _doc:
            try:
                listing = load_field(
                    _doc.get("listing"),
                    union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `listing` field is not valid because:",
                        SourceLine(_doc, "listing", str),
                        [e],
                    )
                )
        else:
            listing = None
        # Unknown keys: namespaced keys (containing ":") are kept as
        # extension fields; any other unknown key is an error.
        # NOTE(review): the `break` stops after the FIRST invalid field, so
        # only one unknown-field error is ever reported per document —
        # presumably intentional in the generator, but worth confirming.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'Directory'", None, _errors__)
        _constructed = cls(
            location=location,
            path=path,
            basename=basename,
            listing=listing,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed
len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Directory" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + 
class Labeled(Saveable):
    # Empty marker mixin generated from an abstract schema type; FieldBase
    # and IOSchema below inherit it. No members of its own.
    pass


class Identified(Saveable):
    # Empty marker mixin; presumably corresponds to the schema's abstract
    # `Identified` type — TODO confirm against the metaschema.
    pass


class LoadContents(Saveable):
    # Empty marker mixin (no members); used in generated multiple
    # inheritance such as InputRecordField.
    pass


class FieldBase(Labeled):
    # Abstract base for field-like schema types; carries only the
    # Labeled marker.
    pass


class InputFormat(Saveable):
    # Empty marker mixin (no members).
    pass


class OutputFormat(Saveable):
    # Empty marker mixin (no members).
    pass
class InputBinding(Saveable):
    """Generated Saveable for the metaschema `InputBinding` type.

    Carries a single optional `loadContents` field plus pass-through
    extension fields for namespaced keys.
    """

    def __init__(
        self,
        loadContents: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        # Falsy values (None / empty mapping) fall back to fresh defaults.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.loadContents = loadContents

    def __eq__(self, other: Any) -> bool:
        # Equality is field-wise on the single declared field only;
        # extension_fields and loadingOptions are deliberately excluded,
        # matching the generator's pattern in the other classes.
        if isinstance(other, InputBinding):
            return bool(self.loadContents == other.loadContents)
        return False

    def __hash__(self) -> int:
        # NOTE: the extra parentheses are NOT a tuple — this hashes the
        # field value directly. Consistent with __eq__ above.
        return hash((self.loadContents))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InputBinding":
        """Construct an InputBinding from a parsed YAML mapping,
        aggregating field errors into one ValidationException."""
        _doc = copy.copy(doc)
        # Shallow copy: re-point line/column info so positions survive.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "loadContents" in _doc:
            try:
                loadContents = load_field(
                    _doc.get("loadContents"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `loadContents` field is not valid because:",
                        SourceLine(_doc, "loadContents", str),
                        [e],
                    )
                )
        else:
            loadContents = None
        # Namespaced unknown keys become extension fields; the first
        # non-namespaced unknown key is recorded and scanning stops (break).
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `loadContents`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'InputBinding'", None, _errors__)
        _constructed = cls(
            loadContents=loadContents,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None
    ) -> CommentedMap:
        """Serialize back to a CommentedMap, replaying the original
        document's line/column info so round-tripped YAML keeps its layout.

        `keys` is the path from the document root to this node; it is used
        to walk `doc_line_info` (a module-level snapshot of the originally
        parsed document, defined elsewhere in this generated module) down
        to the node that corresponds to `self`.
        """
        if keys is None:
            keys = []
        r = CommentedMap()
        doc = copy.copy(doc_line_info)
        keys = copy.copy(keys)

        # Walk doc_line_info along `keys`; any mismatch (missing key,
        # index out of range, wrong container type) leaves doc as None.
        for key in keys:
            if isinstance(doc, CommentedMap):
                doc = doc.get(key)
            elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
                if key < len(doc):
                    doc = doc[key]
                else:
                    doc = None
            else:
                doc = None
                break

        if doc is not None:
            # NOTE(review): if the walk lands on a plain scalar (str/int),
            # it has no `.lc` and this raises AttributeError — presumably
            # `keys` always addresses a mapping/sequence here; confirm.
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        # Helpers defined elsewhere in this module; presumably they accept
        # doc=None and return empty data in that case — TODO confirm.
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        # First pass: emit fields in the order they appeared in the
        # original document, so output ordering matches the input.
        if doc:
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                )

                                # A singleton list is unwrapped to its
                                # sole element before being stored.
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len
                                )
        # Second pass: any declared field not already emitted above
        # (e.g. absent from the original document) is appended.
        if self.loadContents is not None and "loadContents" not in r:
            r["loadContents"] = save(
                self.loadContents,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="loadContents",
                val=r.get("loadContents"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )

        # Only the top-level call emits the $namespaces/$schemas blocks.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["loadContents"])


class IOSchema(Labeled, Documented):
    # Empty marker base combining the Labeled and Documented mixins.
    pass


class InputSchema(IOSchema):
    # Empty marker subclass for input-side schemas.
    pass


class OutputSchema(IOSchema):
    # Empty marker subclass for output-side schemas.
    pass
doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + 
secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + extension_fields: Dict[str, Any] = {} + for k in 
_doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputRecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + 
top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + 
key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + ] + ) + + +class InputRecordSchema(RecordSchema, InputSchema): + def __init__( + self, 
+ type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException 
as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputRecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool 
= True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class InputEnumSchema(EnumSchema, InputSchema): + def __init__( + self, + symbols: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + self.label = label + 
self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputEnumSchema): + return bool( + self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + 
loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputEnumSchema'", None, _errors__) + _constructed = cls( + symbols=symbols, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: 
Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + 
max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type", "label", "doc", "name"]) + + +class InputArraySchema(ArraySchema, InputSchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + 
_doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + 
top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + 
cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class OutputRecordField(RecordField, FieldBase, OutputFormat): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + 
name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( 
+ "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for 
key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + 
top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] + ) + + +class 
OutputRecordSchema(RecordSchema, OutputSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + 
idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = 
(_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + 
self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class OutputEnumSchema(EnumSchema, OutputSchema): + def __init__( + self, + symbols: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions 
= loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputEnumSchema): + return bool( + self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + 
) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputEnumSchema'", None, _errors__) + _constructed = cls( + symbols=symbols, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, 
doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and 
"label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type", "label", "doc", "name"]) + + +class OutputArraySchema(ArraySchema, OutputSchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + 
loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + 
union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if 
isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + 
relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class InputParameter(Parameter, InputFormat, LoadContents): + pass + + +class OutputParameter(Parameter, OutputFormat): + pass + + +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled + before executing a process. See [`Process.hints`](#process) and + [`Process.requirements`](#process). + + Process requirements are the primary mechanism for specifying extensions to + the CWL core specification. + + """ + + pass + + +class Process(Identified, Labeled, Documented): + """ + + The base executable type in CWL is the `Process` object defined by the + document. Note that the `Process` object is abstract and cannot be + directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. + If this requirement is not present, the workflow platform must not perform expression + interpolatation. 
+ + """ + + def __init__( + self, + expressionLib: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InlineJavascriptRequirement" + self.expressionLib = expressionLib + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InlineJavascriptRequirement): + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.expressionLib)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InlineJavascriptRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InlineJavascriptRequirement": + raise ValidationException("Not a InlineJavascriptRequirement") + + if "expressionLib" in _doc: + try: + expressionLib = load_field( + _doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), + [e], + ) + ) + else: + expressionLib = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + 
break + + if _errors__: + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) + _constructed = cls( + expressionLib=expressionLib, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InlineJavascriptRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.expressionLib is not None and "expressionLib" not in r: + r["expressionLib"] = save( + self.expressionLib, + 
top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expressionLib", + val=r.get("expressionLib"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "expressionLib"]) + + +class CommandInputSchema(Saveable): + pass + + +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when + interpreting the `inputs` and `outputs` fields. When a `type` field + contain a IRI, the implementation must check if the type is defined in + `schemaDefs` and use that definition. If the type is not found in + `schemaDefs`, it is an error. The entries in `schemaDefs` must be + processed in the order listed such that later schema definitions may refer + to earlier schema definitions. 
+ + """ + + def __init__( + self, + types: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SchemaDefRequirement" + self.types = types + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.types)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SchemaDefRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SchemaDefRequirement": + raise ValidationException("Not a SchemaDefRequirement") + + try: + types = load_field( + _doc.get("types"), + array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SchemaDefRequirement'", None, _errors__) + _constructed = cls( + 
types=types, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SchemaDefRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.types is not None and "types" not in r: + r["types"] = save( + self.types, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="types", + val=r.get("types"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "types"]) + + +class SecondaryFileSchema(Saveable): + def __init__( + self, + pattern: Any, + required: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.pattern = pattern + self.required = required + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SecondaryFileSchema): + return bool( + self.pattern == other.pattern and self.required == other.required + ) + return False + + def __hash__(self) -> int: + return hash((self.pattern, self.required)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SecondaryFileSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + pattern = load_field( + _doc.get("pattern"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `pattern` field is not valid because:", + SourceLine(_doc, "pattern", str), + [e], + ) + ) + if "required" in _doc: + try: + required = load_field( + _doc.get("required"), + union_of_None_type_or_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `required` field is not valid because:", + SourceLine(_doc, 
"required", str), + [e], + ) + ) + else: + required = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `pattern`, `required`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SecondaryFileSchema'", None, _errors__) + _constructed = cls( + pattern=pattern, + required=required, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if 
type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.pattern is not None and "pattern" not in r: + r["pattern"] = save( + self.pattern, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pattern", + val=r.get("pattern"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.required is not None and "required" not in r: + r["required"] = save( + self.required, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="required", + val=r.get("required"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["pattern", "required"]) + + +class LoadListingRequirement(ProcessRequirement): + """ + Specify the desired behavior for loading the `listing` field of + a Directory object for use by expressions. 
+ + """ + + def __init__( + self, + loadListing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "LoadListingRequirement" + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, LoadListingRequirement): + return bool( + self.class_ == other.class_ and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.loadListing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "LoadListingRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "LoadListingRequirement": + raise ValidationException("Not a LoadListingRequirement") + + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise 
ValidationException( + "Trying 'LoadListingRequirement'", None, _errors__ + ) + _constructed = cls( + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "LoadListingRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "loadListing"]) + + +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment + by the workflow platform when executing the command line tool. May be the + result of executing an expression, such as getting a parameter from input. + + """ + + def __init__( + self, + envName: Any, + envValue: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.envName = envName + self.envValue = envValue + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvironmentDef): + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) + return False + + def __hash__(self) -> int: + return hash((self.envName, self.envValue)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvironmentDef": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + envName = load_field( + _doc.get("envName"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `envName` field is not valid 
because:", + SourceLine(_doc, "envName", str), + [e], + ) + ) + try: + envValue = load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvironmentDef'", None, _errors__) + _constructed = cls( + envName=envName, + envValue=envValue, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, 
str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.envName is not None and "envName" not in r: + r["envName"] = save( + self.envName, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envName", + val=r.get("envName"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.envValue is not None and "envValue" not in r: + r["envValue"] = save( + self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envValue", + val=r.get("envValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["envName", "envValue"]) + + +class CommandLineBinding(InputBinding): + """ + + When listed under `inputBinding` in the input schema, the term + "value" refers to the the corresponding value in the input object. For + binding objects listed in `CommandLineTool.arguments`, the term "value" + refers to the effective value after evaluating `valueFrom`. + + The binding behavior when building the command line depends on the data + type of the value. 
If there is a mismatch between the type described by + the input schema and the effective value, such as resulting from an + expression evaluation, an implementation must use the data type of the + effective value. + + - **string**: Add `prefix` and the string to the command line. + + - **number**: Add `prefix` and decimal representation to command line. + + - **boolean**: If true, add `prefix` to the command line. If false, add + nothing. + + - **File**: Add `prefix` and the value of + [`File.path`](#File) to the command line. + + - **Directory**: Add `prefix` and the value of + [`Directory.path`](#Directory) to the command line. + + - **array**: If `itemSeparator` is specified, add `prefix` and the join + the array into a single string with `itemSeparator` separating the + items. Otherwise first add `prefix`, then recursively process + individual elements. + If the array is empty, it does not add anything to command line. + + - **object**: Add `prefix` only, and recursively add object fields for + which `inputBinding` is specified. + + - **null**: Add nothing. 
+ + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + position: Optional[Any] = None, + prefix: Optional[Any] = None, + separate: Optional[Any] = None, + itemSeparator: Optional[Any] = None, + valueFrom: Optional[Any] = None, + shellQuote: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBinding): + return bool( + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix + and self.separate == other.separate + and self.itemSeparator == other.itemSeparator + and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid 
because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "position" in _doc: + try: + position = load_field( + _doc.get("position"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), + [e], + ) + ) + else: + position = None + if "prefix" in _doc: + try: + prefix = load_field( + _doc.get("prefix"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), + [e], + ) + ) + else: + prefix = None + if "separate" in _doc: + try: + separate = load_field( + _doc.get("separate"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), + [e], + ) + ) + else: + separate = None + if "itemSeparator" in _doc: + try: + itemSeparator = load_field( + _doc.get("itemSeparator"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [e], + ) + ) + else: + itemSeparator = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + if "shellQuote" in _doc: + try: + shellQuote = load_field( + 
_doc.get("shellQuote"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [e], + ) + ) + else: + shellQuote = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for 
ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.position is not None and "position" not in r: + r["position"] = save( + self.position, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="position", + val=r.get("position"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.prefix is not None and "prefix" not in r: + r["prefix"] = save( + self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="prefix", + val=r.get("prefix"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.separate is not None and "separate" not in r: + r["separate"] = save( + self.separate, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="separate", + 
val=r.get("separate"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.itemSeparator is not None and "itemSeparator" not in r: + r["itemSeparator"] = save( + self.itemSeparator, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="itemSeparator", + val=r.get("itemSeparator"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.shellQuote is not None and "shellQuote" not in r: + r["shellQuote"] = save( + self.shellQuote, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="shellQuote", + val=r.get("shellQuote"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "loadContents", + "position", + "prefix", + "separate", + "itemSeparator", + "valueFrom", + "shellQuote", + ] + ) + + +class CommandOutputBinding(LoadContents): + """ + Describes how to generate an output parameter based on the files produced + by a CommandLineTool. 
+ + The output parameter value is generated by applying these operations in the + following order: + + - glob + - loadContents + - outputEval + - secondaryFiles + + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + glob: Optional[Any] = None, + outputEval: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.loadListing = loadListing + self.glob = glob + self.outputEval = outputEval + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputBinding): + return bool( + self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.glob == other.glob + and self.outputEval == other.outputEval + ) + return False + + def __hash__(self) -> int: + return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, 
+ baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "glob" in _doc: + try: + glob = load_field( + _doc.get("glob"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), + [e], + ) + ) + else: + glob = None + if "outputEval" in _doc: + try: + outputEval = load_field( + _doc.get("outputEval"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), + [e], + ) + ) + else: + outputEval = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandOutputBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + loadListing=loadListing, + glob=glob, + outputEval=outputEval, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: 
+ if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + if self.glob is not None and "glob" not in r: + r["glob"] = save( + self.glob, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="glob", + val=r.get("glob"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputEval is not None and "outputEval" not in r: + r["outputEval"] = save( + self.outputEval, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputEval", + val=r.get("outputEval"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"]) + + +class CommandLineBindable(Saveable): + def __init__( + self, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBindable): + return bool(self.inputBinding == other.inputBinding) + return False + + def __hash__(self) -> int: + return hash((self.inputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineBindable": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if 
"inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineBindable'", None, _errors__) + _constructed = cls( + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if 
isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["inputBinding"]) + + +class CommandInputRecordField(InputRecordField, CommandLineBindable): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + 
self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException 
as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + 
uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise 
ValidationException( + "Trying 'CommandInputRecordField'", None, _errors__ + ) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + 
min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + 
cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + "inputBinding", + ] + ) + + +class CommandInputRecordSchema( + InputRecordSchema, CommandInputSchema, CommandLineBindable +): + 
def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + 
try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + 
ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + 
old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): + def __init__( + self, + symbols: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputEnumSchema): + return bool( + self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.symbols, + self.type, + self.label, + self.doc, + self.name, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + 
name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except 
ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ + ) + _constructed = cls( + symbols=symbols, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if 
isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), 
+ relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable +): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return 
hash( + (self.items, self.type, self.label, self.doc, self.name, self.inputBinding) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + 
doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + 
relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandOutputRecordField(OutputRecordField): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + 
self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + 
except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + 
uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and 
isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, 
+ line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] 
= self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "outputBinding", + ] + ) + + +class CommandOutputRecordSchema(OutputRecordSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + 
if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 
'CommandOutputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + 
max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class CommandOutputEnumSchema(OutputEnumSchema): + def __init__( + self, + symbols: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: 
Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputEnumSchema): + return bool( + self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.symbols, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + 
_doc.get("type"), + typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputEnumSchema'", None, _errors__ + ) + _constructed = cls( + symbols=symbols, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for 
key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( 
+ self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["symbols", "type", "label", "doc", "name"]) + + +class CommandOutputArraySchema(OutputArraySchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type 
+ and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + 
_doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + 
max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.items is not None and "items" not in r: + r["items"] = save( + self.items, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + 
top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + self.inputBinding = inputBinding + + def __eq__(self, 
other: Any) -> bool: + if isinstance(other, CommandInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + 
_doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + 
union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, 
`loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandInputParameter'", None, _errors__) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save 
= base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + 
cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + 
val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class CommandOutputParameter(OutputParameter): + """ + An output parameter for a CommandLineTool. + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == 
other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = 
None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, 
"outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + 
max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + 
"secondaryFiles", + "streamable", + "doc", + "id", + "format", + "type", + "outputBinding", + ] + ) + + +class CommandLineTool(Process): + """ + This defines the schema of the CWL Command Line Tool Description document. + + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + baseCommand: Optional[Any] = None, + arguments: Optional[Any] = None, + stdin: Optional[Any] = None, + stderr: Optional[Any] = None, + stdout: Optional[Any] = None, + successCodes: Optional[Any] = None, + temporaryFailCodes: Optional[Any] = None, + permanentFailCodes: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.class_ = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + 
and self.baseCommand == other.baseCommand + and self.arguments == other.arguments + and self.stdin == other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "CommandLineTool": + raise ValidationException("Not a CommandLineTool") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = 
load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "baseCommand" in _doc: + try: + baseCommand = load_field( + _doc.get("baseCommand"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), + [e], + ) + ) + else: + baseCommand = None + if "arguments" in _doc: + try: + arguments = load_field( + _doc.get("arguments"), + union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), + [e], + ) + ) 
+ else: + arguments = None + if "stdin" in _doc: + try: + stdin = load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), + [e], + ) + ) + else: + stdin = None + if "stderr" in _doc: + try: + stderr = load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), + [e], + ) + ) + else: + stderr = None + if "stdout" in _doc: + try: + stdout = load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), + [e], + ) + ) + else: + stdout = None + if "successCodes" in _doc: + try: + successCodes = load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), + [e], + ) + ) + else: + successCodes = None + if "temporaryFailCodes" in _doc: + try: + temporaryFailCodes = load_field( + _doc.get("temporaryFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), + [e], + ) + ) + else: + temporaryFailCodes = None + if "permanentFailCodes" in _doc: + try: + permanentFailCodes = load_field( + 
_doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [e], + ) + ) + else: + permanentFailCodes = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineTool'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc 
= None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "CommandLineTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = 
save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + 
u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.baseCommand is not None and "baseCommand" not in r: + r["baseCommand"] = save( + self.baseCommand, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="baseCommand", + val=r.get("baseCommand"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.arguments is not None and "arguments" not in r: + r["arguments"] = save( + self.arguments, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="arguments", + val=r.get("arguments"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stdin is not None and "stdin" not in r: + r["stdin"] = save( + self.stdin, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdin", + val=r.get("stdin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stderr is not None and "stderr" not in r: + r["stderr"] = save( + self.stderr, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stderr", + val=r.get("stderr"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stdout is not None and "stdout" not in r: + r["stdout"] = save( + self.stdout, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdout", + val=r.get("stdout"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if 
self.successCodes is not None and "successCodes" not in r: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="successCodes", + val=r.get("successCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: + r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="temporaryFailCodes", + val=r.get("temporaryFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.permanentFailCodes is not None and "permanentFailCodes" not in r: + r["permanentFailCodes"] = save( + self.permanentFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="permanentFailCodes", + val=r.get("permanentFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", + ] + ) + + +class DockerRequirement(ProcessRequirement): + """ + Indicates that a workflow component should be run in a + [Docker](http://docker.com) or Docker-compatible (such as + [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and + specifies how to fetch 
or build the image. + + If a CommandLineTool lists `DockerRequirement` under + `hints` (or `requirements`), it may (or must) be run in the specified Docker + container. + + The platform must first acquire or install the correct Docker image as + specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. + + The platform must execute the tool in the container using `docker run` with + the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output + directory through the use of volume bind mounts. The platform should rewrite + file paths in the input object to correspond to the Docker bind mounted + locations. That is, the platform should rewrite values in the parameter context + such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths + within the container. The platform must ensure that `runtime.outdir` and + `runtime.tmpdir` are distinct directories. + + When running a tool contained in Docker, the workflow platform must not + assume anything about the contents of the Docker container, such as the + presence or absence of specific software, except to assume that the + generated command line represents a valid command within the runtime + environment of the container. + + A container image may specify an + [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) + and/or + [CMD](https://docs.docker.com/engine/reference/builder/#cmd). + Command line arguments will be appended after all elements of + ENTRYPOINT, and will override all elements specified using CMD (in + other words, CMD is only used when the CommandLineTool definition + produces an empty command line). + + Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility + concerns of the implicit hidden execution point (For further discussion, see + [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). 
Portable + CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. + CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the + `requirements` section. + + ## Interaction with other requirements + + If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a + DockerRequirement, the environment variables must be provided to Docker + using `--env` or `--env-file` and interact with the container's preexisting + environment as defined by Docker. + + """ + + def __init__( + self, + dockerPull: Optional[Any] = None, + dockerLoad: Optional[Any] = None, + dockerFile: Optional[Any] = None, + dockerImport: Optional[Any] = None, + dockerImageId: Optional[Any] = None, + dockerOutputDirectory: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "DockerRequirement" + self.dockerPull = dockerPull + self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DockerRequirement): + return bool( + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + 
self.dockerOutputDirectory, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "DockerRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "DockerRequirement": + raise ValidationException("Not a DockerRequirement") + + if "dockerPull" in _doc: + try: + dockerPull = load_field( + _doc.get("dockerPull"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), + [e], + ) + ) + else: + dockerPull = None + if "dockerLoad" in _doc: + try: + dockerLoad = load_field( + _doc.get("dockerLoad"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), + [e], + ) + ) + else: + dockerLoad = None + if "dockerFile" in _doc: + try: + dockerFile = load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), + [e], + ) + ) + else: + dockerFile = None + if "dockerImport" in _doc: + try: + dockerImport = load_field( + _doc.get("dockerImport"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), + [e], + ) + ) + else: + dockerImport = None + if "dockerImageId" in _doc: + try: + dockerImageId = load_field( + _doc.get("dockerImageId"), + 
union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), + [e], + ) + ) + else: + dockerImageId = None + if "dockerOutputDirectory" in _doc: + try: + dockerOutputDirectory = load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), + [e], + ) + ) + else: + dockerOutputDirectory = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'DockerRequirement'", None, _errors__) + _constructed = cls( + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if 
key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "DockerRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.dockerPull is not None and "dockerPull" not in r: + r["dockerPull"] = save( + self.dockerPull, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerPull", + val=r.get("dockerPull"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerLoad is not None and "dockerLoad" not in r: + r["dockerLoad"] = save( + self.dockerLoad, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerLoad", + val=r.get("dockerLoad"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerFile is not None and "dockerFile" not in r: + r["dockerFile"] = save( + self.dockerFile, + 
top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerFile", + val=r.get("dockerFile"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerImport is not None and "dockerImport" not in r: + r["dockerImport"] = save( + self.dockerImport, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImport", + val=r.get("dockerImport"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerImageId is not None and "dockerImageId" not in r: + r["dockerImageId"] = save( + self.dockerImageId, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImageId", + val=r.get("dockerImageId"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: + r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerOutputDirectory", + val=r.get("dockerOutputDirectory"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", + ] + ) + + +class SoftwareRequirement(ProcessRequirement): + """ + A list of software packages that should be configured in the environment of + the defined process. 
+ + """ + + def __init__( + self, + packages: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SoftwareRequirement" + self.packages = packages + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.packages)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwareRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SoftwareRequirement": + raise ValidationException("Not a SoftwareRequirement") + + try: + packages = load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwareRequirement'", None, _errors__) + _constructed = cls( + packages=packages, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SoftwareRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.packages is not None and "packages" not in r: + r["packages"] = save( + self.packages, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="packages", + val=r.get("packages"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Optional[Any] = None, + specs: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwarePackage": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + package = load_field( + _doc.get("package"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), + [e], + ) + ) + if "version" in _doc: + try: + version = load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `version` field is 
not valid because:", + SourceLine(_doc, "version", str), + [e], + ) + ) + else: + version = None + if "specs" in _doc: + try: + specs = load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), + [e], + ) + ) + else: + specs = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwarePackage'", None, _errors__) + _constructed = cls( + package=package, + version=version, + specs=specs, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in 
self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.package is not None and "package" not in r: + r["package"] = save( + self.package, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="package", + val=r.get("package"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.version is not None and "version" not in r: + r["version"] = save( + self.version, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="version", + val=r.get("version"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.specs is not None and "specs" not in r: + u = save_relative_uri(self.specs, base_url, False, None, relative_uris) + r["specs"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="specs", + val=r.get("specs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["package", "version", "specs"]) + + +class Dirent(Saveable): + """ + Define a 
file or subdirectory that must be placed in the designated output + directory prior to executing the command line tool. May be the result of + executing an expression, such as building a configuration file from a + template. + + """ + + def __init__( + self, + entry: Any, + entryname: Optional[Any] = None, + writable: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.entryname = entryname + self.entry = entry + self.writable = writable + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) + return False + + def __hash__(self) -> int: + return hash((self.entryname, self.entry, self.writable)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Dirent": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "entryname" in _doc: + try: + entryname = load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), + [e], + ) + ) + else: + entryname = None + try: + entry = load_field( + _doc.get("entry"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", 
str), + [e], + ) + ) + if "writable" in _doc: + try: + writable = load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), + [e], + ) + ) + else: + writable = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Dirent'", None, _errors__) + _constructed = cls( + entryname=entryname, + entry=entry, + writable=writable, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in 
doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.entryname is not None and "entryname" not in r: + r["entryname"] = save( + self.entryname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entryname", + val=r.get("entryname"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.entry is not None and "entry" not in r: + r["entry"] = save( + self.entry, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entry", + val=r.get("entry"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.writable is not None and "writable" not in r: + r["writable"] = save( + self.writable, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="writable", + val=r.get("writable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["entryname", "entry", "writable"]) + + +class InitialWorkDirRequirement(ProcessRequirement): + """ + Define a list of files and 
subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool. + """ + + def __init__( + self, + listing: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InitialWorkDirRequirement" + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.listing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InitialWorkDirRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InitialWorkDirRequirement": + raise ValidationException("Not a InitialWorkDirRequirement") + + try: + listing = load_field( + _doc.get("listing"), + union_of_array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + 
"invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) + _constructed = cls( + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InitialWorkDirRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.listing is not 
None and "listing" not in r: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "listing"]) + + +class EnvVarRequirement(ProcessRequirement): + """ + Define a list of environment variables which will be set in the + execution environment of the tool. See `EnvironmentDef` for details. + + """ + + def __init__( + self, + envDef: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "EnvVarRequirement" + self.envDef = envDef + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.envDef)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvVarRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "EnvVarRequirement": + raise ValidationException("Not a EnvVarRequirement") + + try: + envDef = load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, + baseuri, + loadingOptions, + ) 
+ except ValidationException as e: + _errors__.append( + ValidationException( + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvVarRequirement'", None, _errors__) + _constructed = cls( + envDef=envDef, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "EnvVarRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.envDef is not None and "envDef" not in r: + r["envDef"] = save( + self.envDef, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envDef", + val=r.get("envDef"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "envDef"]) + + +class ShellCommandRequirement(ProcessRequirement): + """ + Modify the behavior of CommandLineTool to generate a single string + containing a shell command line. Each item in the argument list must be + joined into a string separated by single spaces and quoted to prevent + intepretation by the shell, unless `CommandLineBinding` for that argument + contains `shellQuote: false`. If `shellQuote: false` is specified, the + argument is joined into the command string without quoting, which allows + the use of shell metacharacters such as `|` for pipes. 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ShellCommandRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ShellCommandRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ShellCommandRequirement": + raise ValidationException("Not a ShellCommandRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif 
isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ShellCommandRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ResourceRequirement(ProcessRequirement): + """ + Specify basic hardware resource requirements. + + "min" is the minimum amount of a resource that must be reserved to schedule + a job. If "min" cannot be satisfied, the job should not be run. + + "max" is the maximum amount of a resource that the job shall be permitted + to use. 
If a node has sufficient resources, multiple jobs may be scheduled + on a single node provided each job's "max" resource requirements are + met. If a job attempts to exceed its "max" resource allocation, an + implementation may deny additional resources, which may result in job + failure. + + If "min" is specified but "max" is not, then "max" == "min" + If "max" is specified by "min" is not, then "min" == "max". + + It is an error if max < min. + + It is an error if the value of any of these fields is negative. + + If neither "min" nor "max" is specified for a resource, use the default values below. + + """ + + def __init__( + self, + coresMin: Optional[Any] = None, + coresMax: Optional[Any] = None, + ramMin: Optional[Any] = None, + ramMax: Optional[Any] = None, + tmpdirMin: Optional[Any] = None, + tmpdirMax: Optional[Any] = None, + outdirMin: Optional[Any] = None, + outdirMax: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ResourceRequirement" + self.coresMin = coresMin + self.coresMax = coresMax + self.ramMin = ramMin + self.ramMax = ramMax + self.tmpdirMin = tmpdirMin + self.tmpdirMax = tmpdirMax + self.outdirMin = outdirMin + self.outdirMax = outdirMax + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ResourceRequirement): + return bool( + self.class_ == other.class_ + and self.coresMin == other.coresMin + and self.coresMax == other.coresMax + and self.ramMin == other.ramMin + and self.ramMax == other.ramMax + and self.tmpdirMin == other.tmpdirMin + and self.tmpdirMax == other.tmpdirMax + and self.outdirMin == other.outdirMin + and self.outdirMax == other.outdirMax + ) + return False + + def __hash__(self) -> int: + 
return hash( + ( + self.class_, + self.coresMin, + self.coresMax, + self.ramMin, + self.ramMax, + self.tmpdirMin, + self.tmpdirMax, + self.outdirMin, + self.outdirMax, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ResourceRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ResourceRequirement": + raise ValidationException("Not a ResourceRequirement") + + if "coresMin" in _doc: + try: + coresMin = load_field( + _doc.get("coresMin"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), + [e], + ) + ) + else: + coresMin = None + if "coresMax" in _doc: + try: + coresMax = load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [e], + ) + ) + else: + coresMax = None + if "ramMin" in _doc: + try: + ramMin = load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `ramMin` field is not valid because:", + SourceLine(_doc, "ramMin", str), + [e], + ) + ) + else: + ramMin = None + if "ramMax" in _doc: + try: + ramMax = load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", 
str), + [e], + ) + ) + else: + ramMax = None + if "tmpdirMin" in _doc: + try: + tmpdirMin = load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [e], + ) + ) + else: + tmpdirMin = None + if "tmpdirMax" in _doc: + try: + tmpdirMax = load_field( + _doc.get("tmpdirMax"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [e], + ) + ) + else: + tmpdirMax = None + if "outdirMin" in _doc: + try: + outdirMin = load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + ) + ) + else: + outdirMin = None + if "outdirMax" in _doc: + try: + outdirMax = load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + ) + ) + else: + outdirMax = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + k + ), + 
SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ResourceRequirement'", None, _errors__) + _constructed = cls( + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ResourceRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + 
min_col=min_col, + max_len=max_len + ) + if self.coresMin is not None and "coresMin" not in r: + r["coresMin"] = save( + self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="coresMin", + val=r.get("coresMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.coresMax is not None and "coresMax" not in r: + r["coresMax"] = save( + self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="coresMax", + val=r.get("coresMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.ramMin is not None and "ramMin" not in r: + r["ramMin"] = save( + self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="ramMin", + val=r.get("ramMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.ramMax is not None and "ramMax" not in r: + r["ramMax"] = save( + self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="ramMax", + val=r.get("ramMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.tmpdirMin is not None and "tmpdirMin" not in r: + r["tmpdirMin"] = save( + self.tmpdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="tmpdirMin", + val=r.get("tmpdirMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.tmpdirMax is not None and "tmpdirMax" not in r: + r["tmpdirMax"] = save( + self.tmpdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="tmpdirMax", + val=r.get("tmpdirMax"), + 
cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outdirMin is not None and "outdirMin" not in r: + r["outdirMin"] = save( + self.outdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outdirMin", + val=r.get("outdirMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outdirMax is not None and "outdirMax" not in r: + r["outdirMax"] = save( + self.outdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outdirMax", + val=r.get("outdirMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "coresMin", + "coresMax", + "ramMin", + "ramMax", + "tmpdirMin", + "tmpdirMax", + "outdirMin", + "outdirMax", + ] + ) + + +class WorkReuse(ProcessRequirement): + """ + For implementations that support reusing output from past work (on + the assumption that same code and same input produce same + results), control whether to enable or disable the reuse behavior + for a particular tool or step (to accomodate situations where that + assumption is incorrect). A reused step is not executed but + instead returns the same output as the original execution. + + If `enableReuse` is not specified, correct tools should assume it + is enabled by default. 
+ + """ + + def __init__( + self, + enableReuse: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "WorkReuse" + self.enableReuse = enableReuse + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkReuse): + return bool( + self.class_ == other.class_ and self.enableReuse == other.enableReuse + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.enableReuse)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkReuse": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "WorkReuse": + raise ValidationException("Not a WorkReuse") + + try: + enableReuse = load_field( + _doc.get("enableReuse"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `enableReuse` field is not valid because:", + SourceLine(_doc, "enableReuse", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `enableReuse`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkReuse'", None, _errors__) + _constructed = cls( + enableReuse=enableReuse, + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "WorkReuse" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.enableReuse is not None and "enableReuse" not in r: + r["enableReuse"] = save( + self.enableReuse, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="enableReuse", + val=r.get("enableReuse"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "enableReuse"]) + + +class NetworkAccess(ProcessRequirement): + """ + Indicate whether a process requires outgoing IPv4/IPv6 network + access. Choice of IPv4 or IPv6 is implementation and site + specific, correct tools must support both. + + If `networkAccess` is false or not specified, tools must not + assume network access, except for localhost (the loopback device). + + If `networkAccess` is true, the tool must be able to make outgoing + connections to network resources. Resources may be on a private + subnet or the public Internet. However, implementations and sites + may apply their own security policies to restrict what is + accessible by the tool. + + Enabling network access does not imply a publically routable IP + address or the ability to accept inbound connections. 
+ + """ + + def __init__( + self, + networkAccess: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "NetworkAccess" + self.networkAccess = networkAccess + + def __eq__(self, other: Any) -> bool: + if isinstance(other, NetworkAccess): + return bool( + self.class_ == other.class_ + and self.networkAccess == other.networkAccess + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.networkAccess)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "NetworkAccess": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "NetworkAccess": + raise ValidationException("Not a NetworkAccess") + + try: + networkAccess = load_field( + _doc.get("networkAccess"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `networkAccess` field is not valid because:", + SourceLine(_doc, "networkAccess", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `networkAccess`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'NetworkAccess'", None, _errors__) + _constructed = cls( + networkAccess=networkAccess, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "NetworkAccess" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.networkAccess is not None and "networkAccess" not in r: + r["networkAccess"] = save( + self.networkAccess, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="networkAccess", + 
val=r.get("networkAccess"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "networkAccess"]) + + +class InplaceUpdateRequirement(ProcessRequirement): + """ + + If `inplaceUpdate` is true, then an implementation supporting this + feature may permit tools to directly update files with `writable: + true` in InitialWorkDirRequirement. That is, as an optimization, + files may be destructively modified in place as opposed to copied + and updated. + + An implementation must ensure that only one workflow step may + access a writable file at a time. It is an error if a file which + is writable by one workflow step file is accessed (for reading or + writing) by any other workflow step running independently. + However, a file which has been updated in a previous completed + step may be used as input to multiple steps, provided it is + read-only in every step. + + Workflow steps which modify a file must produce the modified file + as output. Downstream steps which futher process the file must + use the output of previous steps, and not refer to a common input + (this is necessary for both ordering and correctness). + + Workflow authors should provide this in the `hints` section. The + intent of this feature is that workflows produce the same results + whether or not InplaceUpdateRequirement is supported by the + implementation, and this feature is primarily available as an + optimization for particular environments. + + Users and implementers should be aware that workflows that + destructively modify inputs may not be repeatable or reproducible. + In particular, enabling this feature implies that WorkReuse should + not be enabled. 
+ + """ + + def __init__( + self, + inplaceUpdate: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InplaceUpdateRequirement" + self.inplaceUpdate = inplaceUpdate + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InplaceUpdateRequirement): + return bool( + self.class_ == other.class_ + and self.inplaceUpdate == other.inplaceUpdate + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.inplaceUpdate)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InplaceUpdateRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InplaceUpdateRequirement": + raise ValidationException("Not a InplaceUpdateRequirement") + + try: + inplaceUpdate = load_field( + _doc.get("inplaceUpdate"), + booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inplaceUpdate` field is not valid because:", + SourceLine(_doc, "inplaceUpdate", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InplaceUpdateRequirement'", None, _errors__ + ) + 
_constructed = cls( + inplaceUpdate=inplaceUpdate, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InplaceUpdateRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.inplaceUpdate is not None and "inplaceUpdate" not in r: + r["inplaceUpdate"] = save( + self.inplaceUpdate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="inplaceUpdate", + val=r.get("inplaceUpdate"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "inplaceUpdate"]) + + +class ToolTimeLimit(ProcessRequirement): + """ + Set an upper limit on the execution time of a CommandLineTool. + A CommandLineTool whose execution duration exceeds the time + limit may be preemptively terminated and considered failed. + May also be used by batch systems to make scheduling decisions. + The execution duration excludes external operations, such as + staging of files, pulling a docker image etc, and only counts + wall-time for the execution of the command line itself. + + """ + + def __init__( + self, + timelimit: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ToolTimeLimit" + self.timelimit = timelimit + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ToolTimeLimit): + return bool( + self.class_ == other.class_ and self.timelimit == other.timelimit + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.timelimit)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ToolTimeLimit": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ToolTimeLimit": + raise ValidationException("Not a 
ToolTimeLimit") + + try: + timelimit = load_field( + _doc.get("timelimit"), + union_of_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `timelimit` field is not valid because:", + SourceLine(_doc, "timelimit", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `timelimit`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ToolTimeLimit'", None, _errors__) + _constructed = cls( + timelimit=timelimit, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ToolTimeLimit" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, 
key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.timelimit is not None and "timelimit" not in r: + r["timelimit"] = save( + self.timelimit, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="timelimit", + val=r.get("timelimit"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "timelimit"]) + + +class ExpressionToolOutputParameter(OutputParameter): + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + + def __eq__(self, 
other: Any) -> bool: + if isinstance(other, ExpressionToolOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionToolOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 
`secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + 
"invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ExpressionToolOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = 
self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, 
+ ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) + + +class WorkflowInputParameter(InputParameter): + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = 
loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = 
load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + 
loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_InputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = 
expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowInputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) 
+ cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and 
"streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class ExpressionTool(Process): + """ + An ExpressionTool is a type of Process object that can be run by itself + or as a Workflow step. It executes a pure Javascript expression that has + access to the same input parameters as a workflow. It is meant to be used + sparingly as a way to isolate complex Javascript expressions that need to + operate on input data and produce some result; perhaps just a + rearrangement of the inputs. No Docker software container is required + or allowed. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + expression: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.class_ = "ExpressionTool" + self.expression = expression + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + and self.expression == other.expression + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.class_, + self.expression, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ExpressionTool": + raise ValidationException("Not a ExpressionTool") + + if "id" in _doc: + try: + id = load_field( + 
_doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_ExpressionToolOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + 
"the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + try: + expression = load_field( + _doc.get("expression"), + ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `expression` field is not valid because:", + SourceLine(_doc, "expression", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `expression`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ExpressionTool'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + expression=expression, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc 
= doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ExpressionTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + 
cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + 
val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.expression is not None and "expression" not in r: + r["expression"] = save( + self.expression, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expression", + val=r.get("expression"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "class", + "expression", + ] + ) + + +class WorkflowOutputParameter(OutputParameter): + """ + Describe an output parameter of a workflow. The parameter must be + connected to one or more parameters defined in the workflow that + will provide the value of the output parameter. It is legal to + connect a WorkflowInputParameter to a WorkflowOutputParameter. 
+ + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputSource: Optional[Any] = None, + linkMerge: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.outputSource = outputSource + self.linkMerge = linkMerge + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.outputSource == other.outputSource + and self.linkMerge == other.linkMerge + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.outputSource, + self.linkMerge, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except 
ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + 
uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputSource" in _doc: + try: + outputSource = load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), + [e], + ) + ) + else: + outputSource = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `outputSource`, `linkMerge`, 
`type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + outputSource=outputSource, + linkMerge=linkMerge, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if 
isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not 
in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputSource is not None and "outputSource" not in r: + u = save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) + r["outputSource"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputSource", + val=r.get("outputSource"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "outputSource", + "linkMerge", + "type", + ] + ) + + 
The input of a workflow step connects an upstream parameter (from the
+ workflow inputs, or the outputs of other workflow steps) with the input
+ parameters of the process specified by the `run` field. Only input parameters
+ declared by the target process will be passed through at runtime to the process,
+ though additional parameters may be specified (for use within `valueFrom`
+ expressions for instance) - unconnected or unused parameters do not represent an
+ error condition.
Source parameters which are arrays are concatenated. + Source parameters which are single element types are appended as + single elements. + + """ + + def __init__( + self, + id: Optional[Any] = None, + source: Optional[Any] = None, + linkMerge: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + label: Optional[Any] = None, + default: Optional[Any] = None, + valueFrom: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.source = source + self.linkMerge = linkMerge + self.loadContents = loadContents + self.loadListing = loadListing + self.label = label + self.default = default + self.valueFrom = valueFrom + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepInput): + return bool( + self.id == other.id + and self.source == other.source + and self.linkMerge == other.linkMerge + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.label == other.label + and self.default == other.default + and self.valueFrom == other.valueFrom + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.source, + self.linkMerge, + self.loadContents, + self.loadListing, + self.label, + self.default, + self.valueFrom, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepInput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + 
uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "source" in _doc: + try: + source = load_field( + _doc.get("source"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [e], + ) + ) + else: + source = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "label" in _doc: + try: + 
label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__) + _constructed = cls( + id=id, + source=source, + linkMerge=linkMerge, + loadContents=loadContents, + loadListing=loadListing, + label=label, + default=default, + valueFrom=valueFrom, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + 
base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, 
+ max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.source is not None and "source" not in r: + u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) + r["source"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="source", + val=r.get("source"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "source", + "linkMerge", + "loadContents", + "loadListing", + "label", + "default", + "valueFrom", + ] + ) + + +class WorkflowStepOutput(Identified): + """ + Associate an output parameter of the underlying process with a workflow + parameter. The workflow parameter (given in the `id` field) be may be used + as a `source` to connect with input parameters of other workflow steps, or + with an output parameter of the process. + + A unique identifier for this workflow output parameter. This is + the identifier to use in the `source` field of `WorkflowStepInput` + to connect the output value to downstream parameters. 
+ + """ + + def __init__( + self, + id: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepOutput): + return bool(self.id == other.id) + return False + + def __hash__(self) -> int: + return hash((self.id)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepOutput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__) + _constructed = cls( + id=id, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) 
+ loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = 
add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["id"]) + + +class WorkflowStep(Identified, Labeled, Documented): + """ + A workflow step is an executable element of a workflow. It specifies the + underlying process implementation (such as `CommandLineTool` or another + `Workflow`) in the `run` field and connects the input and output parameters + of the underlying process to workflow parameters. + + # Scatter/gather + + To use scatter/gather, + [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + A "scatter" operation specifies that the associated workflow step or + subworkflow should execute separately over a list of input elements. Each + job making up a scatter operation is independent and may be executed + concurrently. + + The `scatter` field specifies one or more input parameters which will be + scattered. An input parameter may be listed more than once. The declared + type of each input parameter is implicitly becomes an array of items of the + input parameter type. If a parameter is listed more than once, it becomes + a nested array. As a result, upstream parameters which are connected to + scattered parameters must be arrays. + + All output parameter types are also implicitly wrapped in arrays. 
Each job + in the scatter results in an entry in the output array. + + If any scattered parameter runtime value is an empty array, all outputs are + set to empty arrays and no work is done for the step, according to + applicable scattering rules. + + If `scatter` declares more than one input parameter, `scatterMethod` + describes how to decompose the input into a discrete set of jobs. + + * **dotproduct** specifies that each of the input arrays are aligned and one + element taken from each array to construct each job. It is an error + if all input arrays are not the same length. + + * **nested_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output must be nested arrays for each level of scattering, in the + order that the input arrays are listed in the `scatter` field. + + * **flat_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output arrays must be flattened to a single level, but otherwise listed in the + order that the input arrays are listed in the `scatter` field. + + # Subworkflows + + To specify a nested workflow as part of a workflow step, + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be + specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as + a subworkflow (recursive workflows are not allowed). 
+ + """ + + def __init__( + self, + in_: Any, + out: Any, + run: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + scatter: Optional[Any] = None, + scatterMethod: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.scatter = scatter + self.scatterMethod = scatterMethod + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStep): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.scatter, + self.scatterMethod, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStep": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + in_ = load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + ) + ) + try: + out = load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + try: + run = load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None, + subscope_baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [e], + ) + ) + if "scatter" in _doc: + try: + scatter = load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [e], + ) + ) + else: + 
scatter = None + if "scatterMethod" in _doc: + try: + scatterMethod = load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [e], + ) + ) + else: + scatterMethod = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `scatter`, `scatterMethod`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStep'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + scatter=scatter, + scatterMethod=scatterMethod, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = 
doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.in_ is not None and "in" not in r: + r["in"] = save( + self.in_, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="in", + val=r.get("in"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.out is not None and "out" not in r: + u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) + r["out"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="out", + val=r.get("out"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.run is not None and "run" not in r: + u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) + r["run"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="run", + val=r.get("run"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.scatter is not None and "scatter" not in r: + u = 
save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) + r["scatter"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatter", + val=r.get("scatter"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.scatterMethod is not None and "scatterMethod" not in r: + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) + r["scatterMethod"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatterMethod", + val=r.get("scatterMethod"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "in", + "out", + "requirements", + "hints", + "run", + "scatter", + "scatterMethod", + ] + ) + + +class Workflow(Process): + """ + A workflow describes a set of **steps** and the **dependencies** between + those steps. When a step produces output that will be consumed by a + second step, the first step is a dependency of the second step. + + When there is a dependency, the workflow engine must execute the preceding + step and wait for it to successfully produce output before executing the + dependent step. If two steps are defined in the workflow graph that + are not directly or indirectly dependent, these steps are **independent**, + and may execute in any order or execute concurrently. A workflow is + complete when all steps have been executed. + + Dependencies between parameters are expressed using the `source` field on + [workflow step input parameters](#WorkflowStepInput) and [workflow output + parameters](#WorkflowOutputParameter). 
+
+    The `source` field expresses the dependency of one parameter on another
+    such that when a value is associated with the parameter specified by
+    `source`, that value is propagated to the destination parameter. When all
+    data links inbound to a given step are fulfilled, the step is ready to
+    execute.
+
+    ## Workflow success and failure
+
+    A completed step must result in one of `success`, `temporaryFailure` or
+    `permanentFailure` states. An implementation may choose to retry a step
+    execution which resulted in `temporaryFailure`. An implementation may
+    choose to either continue running other steps of a workflow, or terminate
+    immediately upon `permanentFailure`.
+
+    * If any step of a workflow execution results in `permanentFailure`, then
+    the workflow status is `permanentFailure`.
+
+    * If one or more steps result in `temporaryFailure` and all other steps
+    complete `success` or are not executed, then the workflow status is
+    `temporaryFailure`.
+
+    * If all workflow steps are executed and complete with `success`, then the
+    workflow status is `success`.
+
+    # Extensions
+
+    [ScatterFeatureRequirement](#ScatterFeatureRequirement) and
+    [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are
+    available as standard [extensions](#Extensions_and_Metadata) to core
+    workflow semantics.
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + steps: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.class_ = "Workflow" + self.steps = steps + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Workflow): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + and self.steps == other.steps + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.class_, + self.steps, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Workflow": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Workflow": + raise ValidationException("Not a Workflow") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + 
baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_WorkflowOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + 
"the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + try: + steps = load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_WorkflowStepLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `steps`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Workflow'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + steps=steps, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = 
doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Workflow" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + 
cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + 
val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.steps is not None and "steps" not in r: + r["steps"] = save( + self.steps, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="steps", + val=r.get("steps"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "class", + "steps", + ] + ) + + +class SubworkflowFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support nested workflows in + the `run` field of [WorkflowStep](#WorkflowStep). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SubworkflowFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SubworkflowFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SubworkflowFeatureRequirement": + raise 
ValidationException("Not a SubworkflowFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'SubworkflowFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SubworkflowFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the 
list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ScatterFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support the `scatter` and + `scatterMethod` fields of [WorkflowStep](#WorkflowStep). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ScatterFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ScatterFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ScatterFeatureRequirement": + raise ValidationException("Not a ScatterFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + 
extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ScatterFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + 
max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links + listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "MultipleInputFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "MultipleInputFeatureRequirement": + raise ValidationException("Not a MultipleInputFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise 
ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "MultipleInputFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + 
r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the `valueFrom` field + of [WorkflowStepInput](#WorkflowStepInput). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "StepInputExpressionRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "StepInputExpressionRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "StepInputExpressionRequirement": + raise ValidationException("Not a StepInputExpressionRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'StepInputExpressionRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = 
False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "StepInputExpressionRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +_vocab = { + "Any": "https://w3id.org/cwl/salad#Any", + "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", + "CWLType": 
"https://w3id.org/cwl/cwl#CWLType", + "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", + "CommandInputArraySchema": "https://w3id.org/cwl/cwl#CommandInputArraySchema", + "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", + "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter", + "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", + "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", + "CommandInputSchema": "https://w3id.org/cwl/cwl#CommandInputSchema", + "CommandLineBindable": "https://w3id.org/cwl/cwl#CommandLineBindable", + "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", + "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", + "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", + "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", + "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema", + "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", + "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", + "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", + "Directory": "https://w3id.org/cwl/cwl#Directory", + "Dirent": "https://w3id.org/cwl/cwl#Dirent", + "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", + "Documented": "https://w3id.org/cwl/salad#Documented", + "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", + "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", + "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", + "Expression": "https://w3id.org/cwl/cwl#Expression", + "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", + "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", + "ExpressionToolOutputParameter": "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", + "FieldBase": 
"https://w3id.org/cwl/cwl#FieldBase", + "File": "https://w3id.org/cwl/cwl#File", + "IOSchema": "https://w3id.org/cwl/cwl#IOSchema", + "Identified": "https://w3id.org/cwl/cwl#Identified", + "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", + "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", + "InplaceUpdateRequirement": "https://w3id.org/cwl/cwl#InplaceUpdateRequirement", + "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", + "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", + "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", + "InputFormat": "https://w3id.org/cwl/cwl#InputFormat", + "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", + "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField", + "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", + "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", + "Labeled": "https://w3id.org/cwl/cwl#Labeled", + "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", + "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", + "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", + "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", + "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", + "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", + "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", + "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", + "OutputFormat": "https://w3id.org/cwl/cwl#OutputFormat", + "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", + "OutputRecordField": "https://w3id.org/cwl/cwl#OutputRecordField", + "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", + "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", + "Parameter": "https://w3id.org/cwl/cwl#Parameter", + "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", + 
"Process": "https://w3id.org/cwl/cwl#Process", + "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", + "RecordField": "https://w3id.org/cwl/salad#RecordField", + "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", + "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", + "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", + "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", + "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", + "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", + "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", + "Sink": "https://w3id.org/cwl/cwl#Sink", + "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", + "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", + "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", + "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", + "ToolTimeLimit": "https://w3id.org/cwl/cwl#ToolTimeLimit", + "WorkReuse": "https://w3id.org/cwl/cwl#WorkReuse", + "Workflow": "https://w3id.org/cwl/cwl#Workflow", + "WorkflowInputParameter": "https://w3id.org/cwl/cwl#WorkflowInputParameter", + "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", + "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", + "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", + "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", + "array": "https://w3id.org/cwl/salad#array", + "boolean": "http://www.w3.org/2001/XMLSchema#boolean", + "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", + "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", + "double": "http://www.w3.org/2001/XMLSchema#double", + "draft-2": "https://w3id.org/cwl/cwl#draft-2", + "draft-3": "https://w3id.org/cwl/cwl#draft-3", + "draft-3.dev1": 
"https://w3id.org/cwl/cwl#draft-3.dev1", + "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", + "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", + "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", + "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", + "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", + "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", + "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", + "enum": "https://w3id.org/cwl/salad#enum", + "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", + "float": "http://www.w3.org/2001/XMLSchema#float", + "int": "http://www.w3.org/2001/XMLSchema#int", + "long": "http://www.w3.org/2001/XMLSchema#long", + "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", + "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", + "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", + "no_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing", + "null": "https://w3id.org/cwl/salad#null", + "record": "https://w3id.org/cwl/salad#record", + "shallow_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing", + "stderr": "https://w3id.org/cwl/cwl#stderr", + "stdin": "https://w3id.org/cwl/cwl#stdin", + "stdout": "https://w3id.org/cwl/cwl#stdout", + "string": "http://www.w3.org/2001/XMLSchema#string", + "v1.0": "https://w3id.org/cwl/cwl#v1.0", + "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", + "v1.1": "https://w3id.org/cwl/cwl#v1.1", + "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", +} +_rvocab = { + "https://w3id.org/cwl/salad#Any": "Any", + "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", + "https://w3id.org/cwl/cwl#CWLType": "CWLType", + "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", + "https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", + "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", + 
"https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", + "https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", + "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", + "https://w3id.org/cwl/cwl#CommandInputSchema": "CommandInputSchema", + "https://w3id.org/cwl/cwl#CommandLineBindable": "CommandLineBindable", + "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", + "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", + "https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", + "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", + "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", + "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", + "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", + "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", + "https://w3id.org/cwl/cwl#Directory": "Directory", + "https://w3id.org/cwl/cwl#Dirent": "Dirent", + "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", + "https://w3id.org/cwl/salad#Documented": "Documented", + "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", + "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", + "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", + "https://w3id.org/cwl/cwl#Expression": "Expression", + "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", + "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", + "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", + "https://w3id.org/cwl/cwl#FieldBase": "FieldBase", + "https://w3id.org/cwl/cwl#File": "File", + "https://w3id.org/cwl/cwl#IOSchema": "IOSchema", + "https://w3id.org/cwl/cwl#Identified": "Identified", + "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", + 
"https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", + "https://w3id.org/cwl/cwl#InplaceUpdateRequirement": "InplaceUpdateRequirement", + "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", + "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", + "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", + "https://w3id.org/cwl/cwl#InputFormat": "InputFormat", + "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", + "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", + "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", + "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", + "https://w3id.org/cwl/cwl#Labeled": "Labeled", + "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", + "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", + "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", + "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", + "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", + "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", + "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", + "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", + "https://w3id.org/cwl/cwl#OutputFormat": "OutputFormat", + "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", + "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", + "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", + "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", + "https://w3id.org/cwl/cwl#Parameter": "Parameter", + "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", + "https://w3id.org/cwl/cwl#Process": "Process", + "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", + "https://w3id.org/cwl/salad#RecordField": "RecordField", + "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", + 
"https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", + "https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", + "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", + "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", + "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", + "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", + "https://w3id.org/cwl/cwl#Sink": "Sink", + "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", + "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", + "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", + "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", + "https://w3id.org/cwl/cwl#ToolTimeLimit": "ToolTimeLimit", + "https://w3id.org/cwl/cwl#WorkReuse": "WorkReuse", + "https://w3id.org/cwl/cwl#Workflow": "Workflow", + "https://w3id.org/cwl/cwl#WorkflowInputParameter": "WorkflowInputParameter", + "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", + "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", + "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", + "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", + "https://w3id.org/cwl/salad#array": "array", + "http://www.w3.org/2001/XMLSchema#boolean": "boolean", + "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", + "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", + "http://www.w3.org/2001/XMLSchema#double": "double", + "https://w3id.org/cwl/cwl#draft-2": "draft-2", + "https://w3id.org/cwl/cwl#draft-3": "draft-3", + "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", + "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", + "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", + "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", + 
"https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", + "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", + "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", + "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", + "https://w3id.org/cwl/salad#enum": "enum", + "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", + "http://www.w3.org/2001/XMLSchema#float": "float", + "http://www.w3.org/2001/XMLSchema#int": "int", + "http://www.w3.org/2001/XMLSchema#long": "long", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", + "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", + "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing": "no_listing", + "https://w3id.org/cwl/salad#null": "null", + "https://w3id.org/cwl/salad#record": "record", + "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing": "shallow_listing", + "https://w3id.org/cwl/cwl#stderr": "stderr", + "https://w3id.org/cwl/cwl#stdin": "stdin", + "https://w3id.org/cwl/cwl#stdout": "stdout", + "http://www.w3.org/2001/XMLSchema#string": "string", + "https://w3id.org/cwl/cwl#v1.0": "v1.0", + "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", + "https://w3id.org/cwl/cwl#v1.1": "v1.1", + "https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", +} + +strtype = _PrimitiveLoader(str) +inttype = _PrimitiveLoader(int) +floattype = _PrimitiveLoader(float) +booltype = _PrimitiveLoader(bool) +None_type = _PrimitiveLoader(type(None)) +Any_type = _AnyLoader() +PrimitiveTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + ), + "PrimitiveType", +) +AnyLoader = _EnumLoader(("Any",), "Any") +RecordFieldLoader = _RecordLoader(RecordField) +RecordSchemaLoader = _RecordLoader(RecordSchema) +EnumSchemaLoader = _RecordLoader(EnumSchema) +ArraySchemaLoader = _RecordLoader(ArraySchema) +CWLVersionLoader 
= _EnumLoader( + ( + "draft-2", + "draft-3.dev1", + "draft-3.dev2", + "draft-3.dev3", + "draft-3.dev4", + "draft-3.dev5", + "draft-3", + "draft-4.dev1", + "draft-4.dev2", + "draft-4.dev3", + "v1.0.dev4", + "v1.0", + "v1.1.0-dev1", + "v1.1", + ), + "CWLVersion", +) +CWLTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + "File", + "Directory", + ), + "CWLType", +) +FileLoader = _RecordLoader(File) +DirectoryLoader = _RecordLoader(Directory) +LoadListingEnumLoader = _EnumLoader( + ( + "no_listing", + "shallow_listing", + "deep_listing", + ), + "LoadListingEnum", +) +ExpressionLoader = _ExpressionLoader(str) +InputBindingLoader = _RecordLoader(InputBinding) +InputRecordFieldLoader = _RecordLoader(InputRecordField) +InputRecordSchemaLoader = _RecordLoader(InputRecordSchema) +InputEnumSchemaLoader = _RecordLoader(InputEnumSchema) +InputArraySchemaLoader = _RecordLoader(InputArraySchema) +OutputRecordFieldLoader = _RecordLoader(OutputRecordField) +OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema) +OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema) +OutputArraySchemaLoader = _RecordLoader(OutputArraySchema) +InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement) +SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement) +SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema) +LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement) +EnvironmentDefLoader = _RecordLoader(EnvironmentDef) +CommandLineBindingLoader = _RecordLoader(CommandLineBinding) +CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding) +CommandLineBindableLoader = _RecordLoader(CommandLineBindable) +CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField) +CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema) +CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema) +CommandInputArraySchemaLoader = 
_RecordLoader(CommandInputArraySchema) +CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField) +CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema) +CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema) +CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema) +CommandInputParameterLoader = _RecordLoader(CommandInputParameter) +CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) +stdinLoader = _EnumLoader(("stdin",), "stdin") +stdoutLoader = _EnumLoader(("stdout",), "stdout") +stderrLoader = _EnumLoader(("stderr",), "stderr") +CommandLineToolLoader = _RecordLoader(CommandLineTool) +DockerRequirementLoader = _RecordLoader(DockerRequirement) +SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) +SoftwarePackageLoader = _RecordLoader(SoftwarePackage) +DirentLoader = _RecordLoader(Dirent) +InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement) +EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement) +ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement) +ResourceRequirementLoader = _RecordLoader(ResourceRequirement) +WorkReuseLoader = _RecordLoader(WorkReuse) +NetworkAccessLoader = _RecordLoader(NetworkAccess) +InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement) +ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit) +ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter) +WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter) +ExpressionToolLoader = _RecordLoader(ExpressionTool) +LinkMergeMethodLoader = _EnumLoader( + ( + "merge_nested", + "merge_flattened", + ), + "LinkMergeMethod", +) +WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) +WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) +WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) +ScatterMethodLoader = _EnumLoader( + ( + "dotproduct", + "nested_crossproduct", + 
"flat_crossproduct", + ), + "ScatterMethod", +) +WorkflowStepLoader = _RecordLoader(WorkflowStep) +WorkflowLoader = _RecordLoader(Workflow) +SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) +ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement) +MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement) +StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement) +array_of_strtype = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ) +) +uri_strtype_True_False_None = _URILoader(strtype, True, False, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + ) +) +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype +) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + 
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + 2, +) +array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_RecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" +) +enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader( + ("record",), "enum_d9cba076fca539106791a4f46d198c7fcfbdb779" +) +typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = _TypeDSLLoader( + enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2 +) +uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) +enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader( + ("enum",), "enum_d961d79c225752b9fadb617367615ab176b47d77" +) +typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader( + enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2 +) +enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader( + ("array",), "enum_d062602be0b4b8fd33e69e29a841317b6ab665bc" +) +typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader( + enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2 +) +File_classLoader = _EnumLoader(("File",), "File_class") +uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None) +union_of_None_type_or_strtype = _UnionLoader( + ( + None_type, + strtype, + ) +) +uri_union_of_None_type_or_strtype_False_False_None = _URILoader( + union_of_None_type_or_strtype, False, False, None +) +union_of_None_type_or_inttype = _UnionLoader( + ( + None_type, + inttype, + ) +) +union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + FileLoader, + DirectoryLoader, + ) +) 
+array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader +) +union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader +) +uri_union_of_None_type_or_strtype_True_False_None = _URILoader( + union_of_None_type_or_strtype, True, False, None +) +Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) +union_of_None_type_or_booltype = _UnionLoader( + ( + None_type, + booltype, + ) +) +union_of_None_type_or_LoadListingEnumLoader = _UnionLoader( + ( + None_type, + LoadListingEnumLoader, + ) +) +array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( + ( + None_type, + SecondaryFileSchemaLoader, + array_of_SecondaryFileSchemaLoader, + ) +) +secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( + union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader +) +union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ExpressionLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, +) +union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + ) +) +uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = 
_URILoader( + union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None +) +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + 2, +) +array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) +union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_InputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" +) 
+union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + 2, +) +array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) +union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_OutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" +) +union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( + ( + 
None_type, + FileLoader, + DirectoryLoader, + Any_type, + ) +) +union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _UnionLoader( + ( + CommandInputParameterLoader, + WorkflowInputParameterLoader, + ) +) +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( + _ArrayLoader(union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader) +) +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _IdMapLoader( + array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, + "id", + "type", +) +union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _UnionLoader( + ( + CommandOutputParameterLoader, + ExpressionToolOutputParameterLoader, + WorkflowOutputParameterLoader, + ) +) +array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _ArrayLoader( + union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader +) +idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _IdMapLoader( + array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader, + "id", + "type", +) +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + 
SchemaDefRequirementLoader, + LoadListingRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + ShellCommandRequirementLoader, + ResourceRequirementLoader, + WorkReuseLoader, + NetworkAccessLoader, + InplaceUpdateRequirementLoader, + ToolTimeLimitLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + ) +) +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader +) 
+union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + None_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + ) +) +idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader( + 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + "class", + "None", +) +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + LoadListingRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + ShellCommandRequirementLoader, + ResourceRequirementLoader, + WorkReuseLoader, + NetworkAccessLoader, + InplaceUpdateRequirementLoader, + ToolTimeLimitLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + Any_type, + ) +) 
+array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type +) +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + None_type, + 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + ) +) +idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + "class", + "None", +) +union_of_None_type_or_CWLVersionLoader = _UnionLoader( + ( + None_type, + CWLVersionLoader, + ) +) +uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader( + 
union_of_None_type_or_CWLVersionLoader, False, True, None +) +InlineJavascriptRequirement_classLoader = _EnumLoader( + ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" +) +uri_InlineJavascriptRequirement_classLoader_False_True_None = _URILoader( + InlineJavascriptRequirement_classLoader, False, True, None +) +union_of_None_type_or_array_of_strtype = _UnionLoader( + ( + None_type, + array_of_strtype, + ) +) +SchemaDefRequirement_classLoader = _EnumLoader( + ("SchemaDefRequirement",), "SchemaDefRequirement_class" +) +uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( + SchemaDefRequirement_classLoader, False, True, None +) +union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( + ( + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + ) +) +array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( + union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader +) +union_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + ) +) +union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + booltype, + ExpressionLoader, + ) +) +LoadListingRequirement_classLoader = _EnumLoader( + ("LoadListingRequirement",), "LoadListingRequirement_class" +) +uri_LoadListingRequirement_classLoader_False_True_None = _URILoader( + LoadListingRequirement_classLoader, False, True, None +) +union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_strtype, + ) +) +union_of_None_type_or_ExpressionLoader = _UnionLoader( + ( + None_type, + ExpressionLoader, + ) +) 
+union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + CommandLineBindingLoader, + ) +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) 
+union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandInputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + 
union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( + ( + None_type, + CommandOutputBindingLoader, + ) +) +array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandOutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) +) +union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdinLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + 
union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdoutLoader, + stderrLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") +uri_CommandLineTool_classLoader_False_True_None = _URILoader( + CommandLineTool_classLoader, False, True, None +) +array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) +idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( + 
array_of_CommandInputParameterLoader, "id", "type" +) +array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) +idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( + array_of_CommandOutputParameterLoader, "id", "type" +) +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + CommandLineBindingLoader, + ) +) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) +) +array_of_inttype = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype = _UnionLoader( + ( + None_type, + array_of_inttype, + ) +) +DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) +uri_DockerRequirement_classLoader_False_True_None = _URILoader( + DockerRequirement_classLoader, False, True, None +) +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) +uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( + SoftwareRequirement_classLoader, False, True, None +) +array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( + array_of_SoftwarePackageLoader, "package", "specs" +) +uri_union_of_None_type_or_array_of_strtype_False_False_None = _URILoader( + union_of_None_type_or_array_of_strtype, False, False, None +) +InitialWorkDirRequirement_classLoader = _EnumLoader( + ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" +) +uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( + InitialWorkDirRequirement_classLoader, False, True, None +) 
+union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader = _UnionLoader( + ( + None_type, + FileLoader, + array_of_union_of_FileLoader_or_DirectoryLoader, + DirectoryLoader, + DirentLoader, + ExpressionLoader, + ) +) +array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader = _ArrayLoader( + union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader +) +union_of_array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader_or_ExpressionLoader = _UnionLoader( + ( + array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader, + ExpressionLoader, + ) +) +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) +uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( + EnvVarRequirement_classLoader, False, True, None +) +array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( + array_of_EnvironmentDefLoader, "envName", "envValue" +) +ShellCommandRequirement_classLoader = _EnumLoader( + ("ShellCommandRequirement",), "ShellCommandRequirement_class" +) +uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( + ShellCommandRequirement_classLoader, False, True, None +) +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) +uri_ResourceRequirement_classLoader_False_True_None = _URILoader( + ResourceRequirement_classLoader, False, True, None +) +WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") +uri_WorkReuse_classLoader_False_True_None = _URILoader( 
+ WorkReuse_classLoader, False, True, None +) +union_of_booltype_or_ExpressionLoader = _UnionLoader( + ( + booltype, + ExpressionLoader, + ) +) +NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") +uri_NetworkAccess_classLoader_False_True_None = _URILoader( + NetworkAccess_classLoader, False, True, None +) +InplaceUpdateRequirement_classLoader = _EnumLoader( + ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" +) +uri_InplaceUpdateRequirement_classLoader_False_True_None = _URILoader( + InplaceUpdateRequirement_classLoader, False, True, None +) +ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") +uri_ToolTimeLimit_classLoader_False_True_None = _URILoader( + ToolTimeLimit_classLoader, False, True, None +) +union_of_inttype_or_ExpressionLoader = _UnionLoader( + ( + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_InputBindingLoader = _UnionLoader( + ( + None_type, + InputBindingLoader, + ) +) +ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") +uri_ExpressionTool_classLoader_False_True_None = _URILoader( + ExpressionTool_classLoader, False, True, None +) +array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) +idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( + array_of_WorkflowInputParameterLoader, "id", "type" +) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) +idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( + array_of_ExpressionToolOutputParameterLoader, "id", "type" +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1 +) +union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( + ( + None_type, + LinkMergeMethodLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader( + 
union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2 +) +array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( + array_of_WorkflowStepInputLoader, "id", "source" +) +union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + ( + strtype, + WorkflowStepOutputLoader, + ) +) +array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( + union_of_strtype_or_WorkflowStepOutputLoader +) +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) +) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) +) +array_of_Any_type = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type = _UnionLoader( + ( + None_type, + array_of_Any_type, + ) +) +idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_Any_type, "class", "None" +) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) + ) +) +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, + False, + False, + None, +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0 +) +union_of_None_type_or_ScatterMethodLoader = _UnionLoader( + ( + None_type, + ScatterMethodLoader, + ) +) +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader( + union_of_None_type_or_ScatterMethodLoader, False, True, None +) +Workflow_classLoader = _EnumLoader(("Workflow",), 
"Workflow_class") +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, True, None +) +array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) +idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( + array_of_WorkflowOutputParameterLoader, "id", "type" +) +array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) +idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( + union_of_array_of_WorkflowStepLoader, "id", "None" +) +SubworkflowFeatureRequirement_classLoader = _EnumLoader( + ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" +) +uri_SubworkflowFeatureRequirement_classLoader_False_True_None = _URILoader( + SubworkflowFeatureRequirement_classLoader, False, True, None +) +ScatterFeatureRequirement_classLoader = _EnumLoader( + ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" +) +uri_ScatterFeatureRequirement_classLoader_False_True_None = _URILoader( + ScatterFeatureRequirement_classLoader, False, True, None +) +MultipleInputFeatureRequirement_classLoader = _EnumLoader( + ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" +) +uri_MultipleInputFeatureRequirement_classLoader_False_True_None = _URILoader( + MultipleInputFeatureRequirement_classLoader, False, True, None +) +StepInputExpressionRequirement_classLoader = _EnumLoader( + ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" +) +uri_StepInputExpressionRequirement_classLoader_False_True_None = _URILoader( + StepInputExpressionRequirement_classLoader, False, True, None +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) +) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _ArrayLoader( + 
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader
    )
)
# Top-level document loader: accepts a single process object (CommandLineTool,
# ExpressionTool or Workflow) or an array of them (a `$graph` document).
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader(
    (
        CommandLineToolLoader,
        ExpressionToolLoader,
        WorkflowLoader,
        array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
    )
)


def load_document(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """Load a CWL process object from an already-parsed document.

    `baseuri` defaults to the current working directory as a file:// URI.
    NOTE(review): the metadata returned by _document_load is discarded here;
    use load_document_with_metadata to keep it.
    """
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        loadingOptions = LoadingOptions()
    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        doc,
        baseuri,
        loadingOptions,
    )
    return result


def load_document_with_metadata(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
    addl_metadata_fields: Optional[MutableSequence[str]] = None,
) -> Any:
    """Load a CWL process object plus its document-level metadata.

    Unlike load_document, returns the (result, metadata) pair produced by
    _document_load, and seeds LoadingOptions with fileuri=baseuri.
    """
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=baseuri)
    return _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        doc,
        baseuri,
        loadingOptions,
        addl_metadata_fields=addl_metadata_fields,
    )


def load_document_by_string(
    string: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """Parse a YAML/JSON string and load it as a CWL process object.

    Uses yaml_no_ts() so timestamps stay strings, and tags every node with
    `uri` via add_lc_filename so validation errors carry file/line info.
    """
    yaml = yaml_no_ts()
    result = yaml.load(string)
    add_lc_filename(result, uri)

    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)

    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        result,
        uri,
        loadingOptions,
    )
    return result


def load_document_by_yaml(
    yaml: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """
    Shortcut to load via a YAML object.
    yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True
    """
    add_lc_filename(yaml, uri)

    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)

    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader,
        yaml,
        uri,
        loadingOptions,
    )
    return result
diff --git a/schema_salad/tests/cwl_v1_2.py b/schema_salad/tests/cwl_v1_2.py
new file mode 100644
index 000000000..577cccdd8
--- /dev/null
+++ b/schema_salad/tests/cwl_v1_2.py
@@ -0,0 +1,24809 @@
#
# This file was autogenerated using schema-salad-tool --codegen=python
# The code itself is released under the Apache 2.0 license and the help text is
# subject to the license of the original schema.
+import copy +import logging +import os +import pathlib +import re +import tempfile +import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 +import xml.sax # nosec +from abc import ABC, abstractmethod +from io import StringIO +from typing import ( + Any, + Dict, + List, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit +from urllib.request import pathname2url + +from rdflib import Graph +from rdflib.plugins.parsers.notation3 import BadSyntax +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +from schema_salad.exceptions import SchemaSaladException, ValidationException +from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.sourceline import SourceLine, add_lc_filename +from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ + +_vocab: Dict[str, str] = {} +_rvocab: Dict[str, str] = {} + +_logger = logging.getLogger("salad") + + +IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] + + +doc_line_info = CommentedMap() +inserted_line_info: Dict[int, int] = {} + + +class LoadingOptions: + idx: IdxType + fileuri: Optional[str] + baseuri: str + namespaces: MutableMapping[str, str] + schemas: MutableSequence[str] + original_doc: Optional[Any] + addl_metadata: MutableMapping[str, Any] + fetcher: Fetcher + vocab: Dict[str, str] + rvocab: Dict[str, str] + cache: CacheType + imports: List[str] + includes: List[str] + + def __init__( + self, + fetcher: Optional[Fetcher] = None, + namespaces: Optional[Dict[str, str]] = None, + schemas: Optional[List[str]] = None, + fileuri: Optional[str] = None, + copyfrom: Optional["LoadingOptions"] = None, + original_doc: Optional[Any] = None, + addl_metadata: Optional[Dict[str, str]] = None, + baseuri: Optional[str] = None, + idx: Optional[IdxType] = None, + imports: Optional[List[str]] = None, + includes: 
Optional[List[str]] = None, + ) -> None: + """Create a LoadingOptions object.""" + self.original_doc = original_doc + + if idx is not None: + self.idx = idx + else: + self.idx = copyfrom.idx if copyfrom is not None else {} + + if fileuri is not None: + self.fileuri = fileuri + else: + self.fileuri = copyfrom.fileuri if copyfrom is not None else None + + if baseuri is not None: + self.baseuri = baseuri + else: + self.baseuri = copyfrom.baseuri if copyfrom is not None else "" + + if namespaces is not None: + self.namespaces = namespaces + else: + self.namespaces = copyfrom.namespaces if copyfrom is not None else {} + + if schemas is not None: + self.schemas = schemas + else: + self.schemas = copyfrom.schemas if copyfrom is not None else [] + + if addl_metadata is not None: + self.addl_metadata = addl_metadata + else: + self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} + + if imports is not None: + self.imports = imports + else: + self.imports = copyfrom.imports if copyfrom is not None else [] + + if includes is not None: + self.includes = includes + else: + self.includes = copyfrom.includes if copyfrom is not None else [] + + if fetcher is not None: + self.fetcher = fetcher + elif copyfrom is not None: + self.fetcher = copyfrom.fetcher + else: + import requests + from cachecontrol.caches import FileCache + from cachecontrol.wrapper import CacheControl + + root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) + session = CacheControl( + requests.Session(), + cache=FileCache(root / ".cache" / "salad"), + ) + self.fetcher: Fetcher = DefaultFetcher({}, session) + + self.cache = ( + self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} + ) + + self.vocab = _vocab + self.rvocab = _rvocab + + if namespaces is not None: + self.vocab = self.vocab.copy() + self.rvocab = self.rvocab.copy() + for k, v in namespaces.items(): + self.vocab[k] = v + self.rvocab[v] = k + + @property + def graph(self) -> Graph: + 
"""Generate a merged rdflib.Graph from all entries in self.schemas.""" + graph = Graph() + if not self.schemas: + return graph + key = str(hash(tuple(self.schemas))) + if key in self.cache: + return cast(Graph, self.cache[key]) + for schema in self.schemas: + fetchurl = ( + self.fetcher.urljoin(self.fileuri, schema) + if self.fileuri is not None + else pathlib.Path(schema).resolve().as_uri() + ) + if fetchurl not in self.cache or self.cache[fetchurl] is True: + _logger.debug("Getting external schema %s", fetchurl) + try: + content = self.fetcher.fetch_text(fetchurl) + except Exception as e: + _logger.warning( + "Could not load extension schema %s: %s", fetchurl, str(e) + ) + continue + newGraph = Graph() + err_msg = "unknown error" + for fmt in ["xml", "turtle"]: + try: + newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) + self.cache[fetchurl] = newGraph + graph += newGraph + break + except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: + err_msg = str(e) + else: + _logger.warning( + "Could not load extension schema %s: %s", fetchurl, err_msg + ) + self.cache[key] = graph + return graph + + +class Saveable(ABC): + """Mark classes than have a save() and fromDoc() function.""" + + @classmethod + @abstractmethod + def fromDoc( + cls, + _doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Saveable": + """Construct this object from the result of yaml.load().""" + + @abstractmethod + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + ) -> CommentedMap: + """Convert this object to a JSON/YAML friendly dictionary.""" + + +def load_field(val, fieldtype, baseuri, loadingOptions): + # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = 
loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"])
        result, metadata = _document_load_by_url(
            fieldtype,
            url,
            loadingOptions,
        )
        loadingOptions.imports.append(url)
        return result
    elif "$include" in val:
        # $include: fetch raw text and feed it to the field loader as the value.
        if loadingOptions.fileuri is None:
            # NOTE(review): message says "$import" but this is the $include branch.
            raise SchemaSaladException("Cannot load $import without fileuri")
        url = loadingOptions.fetcher.urljoin(
            loadingOptions.fileuri, val["$include"]
        )
        val = loadingOptions.fetcher.fetch_text(url)
        loadingOptions.includes.append(url)
    # Plain value (or the fetched $include text): delegate to the field loader.
    return fieldtype.load(val, baseuri, loadingOptions)


# Union of everything save() may return: a mapping/sequence, a scalar, or None.
save_type = Optional[
    Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]
]


def add_kv(
    old_doc: CommentedMap,
    new_doc: CommentedMap,
    line_numbers: Dict[Any, Dict[str, int]],
    key: str,
    val: Any,
    max_len: int,
    cols: Dict[int, int],
    min_col: int = 0,
) -> int:
    """Add key value pair into Commented Map.

    Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers
    for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap,
    and max col value taken for each line.
    """
    # Mutates the module-global inserted_line_info (line -> last col written),
    # so results depend on calls made for earlier keys in the same document.
    if len(inserted_line_info.keys()) >= 1:
        max_line = max(inserted_line_info.keys()) + 1
    else:
        max_line = 0
    if (
        key in line_numbers
    ):  # If the key to insert is in the original CommentedMap as a key
        line_info = old_doc.lc.data[key]
        if line_info[0] not in inserted_line_info:
            # Original line still free: reuse the old [line, col, vline, vcol].
            new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key])
            inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1]
        else:
            # Original line taken: slide down to the first unoccupied line,
            # shifting the value line by the same delta.
            line = line_info[0]
            while line in inserted_line_info.keys():
                line += 1
            new_doc.lc.add_kv_line_col(
                key,
                [
                    line,
                    old_doc.lc.data[key][1],
                    line + (line - old_doc.lc.data[key][2]),
                    old_doc.lc.data[key][3],
                ],
            )
            inserted_line_info[line] = old_doc.lc.data[key][1]
        return max_len
    elif isinstance(val, (int, float, str)) and not isinstance(
        val, bool
    ):  # If the value is hashable
        if val in line_numbers:  # If the value is in the original CommentedMap
            line = line_numbers[val]["line"]
            if line in inserted_line_info:
                line = max_line
            if line in cols:
                col = max(line_numbers[val]["col"], cols[line])
            else:
                col = line_numbers[val]["col"]
            new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2])
            inserted_line_info[line] = col + len(key) + 2
            # NOTE(review): len("id") looks like a hard-coded leftover —
            # presumably this should be len(key) to match the two lines above; confirm.
            cols[line] = col + len("id") + 2
            return max_len
        elif isinstance(val, str):
            # Scoped-type convention: "<val>?" (optional marker) may be the
            # form recorded in the original document.
            if val + "?" in line_numbers:
                line = line_numbers[val + "?"]["line"]
                if line in inserted_line_info:
                    line = max_line
                if line in cols:
                    col = max(line_numbers[val + "?"]["col"], cols[line])
                else:
                    col = line_numbers[val + "?"]["col"]
                new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2])
                inserted_line_info[line] = col + len(key) + 2
                # NOTE(review): same suspected len("id") / len(key) mix-up as above.
                cols[line] = col + len("id") + 2
                return max_len
    elif old_doc:
        if val in old_doc:
            index = old_doc.lc.data.index(val)
            line_info = old_doc.lc.data[index]
            if line_info[0] not in inserted_line_info:
                new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index])
                inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[
                    index
                ][1]
            else:
                new_doc.lc.add_kv_line_col(
                    key,
                    [
                        max_line,
                        old_doc.lc.data[index][1],
                        max_line + (max_line - old_doc.lc.data[index][2]),
                        old_doc.lc.data[index][3],
                    ],
                )
                inserted_line_info[max_line] = old_doc.lc.data[index][1]
    # NOTE(review): when the `elif old_doc:` branch above fires it does NOT
    # return, so control falls through and add_kv_line_col is called a second
    # time for the same key — looks like a missing `return max_len`; confirm.
    # If neither the key or value is in the original CommentedMap (or value is not hashable)
    new_doc.lc.add_kv_line_col(
        key, [max_line, min_col, max_line, min_col + len(key) + 2]
    )
    inserted_line_info[max_line] = min_col + len(key) + 2
    return max_len + 1


def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]:
    """Get line numbers for kv pairs in CommentedMap.

    For each key/value pair in a CommentedMap, save the line/col info into a dictionary,
    only save value info if value is hashable.
    """
    # Returned shape: {key_or_scalar_value: {"line": int, "col": int}}.
    # Keys map to positions [0]/[1] of the LineCol entry, scalar values to [2]/[3].
    line_numbers: Dict[Any, Dict[str, int]] = {}
    if doc is None:
        return {}
    if doc.lc.data is None:
        return {}
    for key, value in doc.lc.data.items():
        line_numbers[key] = {}

        line_numbers[key]["line"] = doc.lc.data[key][0]
        line_numbers[key]["col"] = doc.lc.data[key][1]
        if isinstance(value, (int, float, bool, str)):
            # Hashable scalar values are indexed too; equal values collide and
            # keep only the last position seen.
            line_numbers[value] = {}
            line_numbers[value]["line"] = doc.lc.data[key][2]
            line_numbers[value]["col"] = doc.lc.data[key][3]
    return line_numbers


def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int:
    # NOTE(review): despite the name, this returns the LARGEST "col" value in
    # line_numbers (the comparison is `> min_col`). Rename or fix? Confirm
    # which behavior callers rely on before touching it.
    min_col = 0
    for line in line_numbers:
        if line_numbers[line]["col"] > min_col:
            min_col = line_numbers[line]["col"]
    return min_col


def get_max_line_num(doc: CommentedMap) -> int:
    """Get the max line number for a CommentedMap.

    Iterate through the the key with the highest line number until you reach a non-CommentedMap value
    or empty CommentedMap.
    """
    # Descends into the value of whichever key sits lowest in the document,
    # so nested maps extend the result; returns last line + 1.
    max_line = 0
    max_key = ""
    cur = doc
    while isinstance(cur, CommentedMap) and len(cur) > 0:
        for key in cur.lc.data.keys():
            if cur.lc.data[key][2] >= max_line:
                max_line = cur.lc.data[key][2]
                max_key = key
        cur = cur[max_key]
    return max_line + 1


def save(
    val: Any,
    top: bool = True,
    base_url: str = "",
    relative_uris: bool = True,
    keys: Optional[List[Any]] = None,
) -> save_type:
    """Save a val of any type.

    Recursively calls save method from class if val is of type Saveable.
    Otherwise, saves val to CommentedMap or CommentedSeq.
    """
    if keys is None:
        keys = []
    # Walk the module-global doc_line_info down the path recorded in `keys`
    # to find the original node whose line/col data should be mirrored.
    doc = doc_line_info
    for key in keys:
        if isinstance(doc, CommentedMap):
            doc = doc.get(key)
        elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
            if key < len(doc):
                doc = doc[key]
            else:
                doc = None
        else:
            doc = None
            break

    if isinstance(val, Saveable):
        return val.save(
            top=top, base_url=base_url, relative_uris=relative_uris, keys=keys
        )
    if isinstance(val, MutableSequence):
        r = CommentedSeq()
        r.lc.data = {}
        for i in range(0, len(val)):
            # NOTE(review): `new_keys = keys` aliases the caller's list, so
            # appends below accumulate across loop iterations instead of
            # producing a per-element path — presumably keys.copy() was
            # intended; confirm against the recursive path lookup above.
            new_keys = keys
            if doc:
                if str(i) in doc:
                    r.lc.data[i] = doc.lc.data[i]
                    new_keys.append(i)
            r.append(
                save(
                    val[i],
                    top=False,
                    base_url=base_url,
                    relative_uris=relative_uris,
                    keys=new_keys,
                )
            )
        return r
        # return [
        #     save(v, top=False, base_url=base_url, relative_uris=relative_uris)
        #     for v in val
        # ]
    if isinstance(val, MutableMapping):
        newdict = CommentedMap()
        # NOTE(review): same aliasing concern as the sequence branch above.
        new_keys = keys
        for key in val:
            if doc:
                if key in doc:
                    newdict.lc.add_kv_line_col(key, doc.lc.data[key])
                    new_keys.append(key)

            newdict[key] = save(
                val[key],
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                keys=new_keys,
            )
        return newdict
        # newdict = {}
        # for key in val:
        #     newdict[key] = save(
        #         val[key], top=False, base_url=base_url, relative_uris=relative_uris
        #     )
        # return newdict
    if val is None or isinstance(val, (int, float, bool, str)):
        return val
    raise Exception("Not Saveable: %s" % type(val))


def save_with_metadata(
    val: Any,
    valLoadingOpts: LoadingOptions,
    top: bool = True,
    base_url: str = "",
    relative_uris: bool = True,
) -> save_type:
    """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level."""
    saved_val = save(val, top, base_url, relative_uris)
    newdict: MutableMapping[str, Any] = {}
    if isinstance(saved_val, MutableSequence):
        # A list result becomes a $graph document.
        newdict = {"$graph": saved_val}
    elif isinstance(saved_val, MutableMapping):
        newdict = saved_val
+ if valLoadingOpts.namespaces: + newdict["$namespaces"] = valLoadingOpts.namespaces + if valLoadingOpts.schemas: + newdict["$schemas"] = valLoadingOpts.schemas + if valLoadingOpts.baseuri: + newdict["$base"] = valLoadingOpts.baseuri + for k, v in valLoadingOpts.addl_metadata.items(): + if k not in newdict: + newdict[k] = v + + return newdict + + +def expand_url( + url, # type: str + base_url, # type: str + loadingOptions, # type: LoadingOptions + scoped_id=False, # type: bool + vocab_term=False, # type: bool + scoped_ref=None, # type: Optional[int] +): + # type: (...) -> str + if url in ("@id", "@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and ":" in url: + prefix = url.split(":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urlsplit(url) + + if ( + ( + bool(split.scheme) + and split.scheme in loadingOptions.fetcher.supported_schemes() + ) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urlsplit(base_url) + frg = "" + if bool(splitbase.fragment): + frg = splitbase.fragment + "/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urlsplit(base_url) + sp = splitbase.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException(f"Term '{url}' not in 
vocabulary") + + return url + + +class _Loader: + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + pass + + +class _AnyLoader(_Loader): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp): + # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None + self.tp = tp + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, self.tp): + raise ValidationException( + "Expected a {} but got {}".format( + self.tp.__class__.__name__, doc.__class__.__name__ + ) + ) + return doc + + def __repr__(self): # type: () -> str + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items): + # type: (_Loader) -> None + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableSequence): + raise ValidationException(f"Expected a list, was {type(doc)}") + r = [] # type: List[Any] + errors = [] # type: List[SchemaSaladException] + for i in range(0, len(doc)): + try: + lf = load_field( + doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions + ) + if isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + except ValidationException as e: + errors.append(e.with_sourceline(SourceLine(doc, i, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self): # type: () -> str + return f"array<{self.items}>" + + +class _EnumLoader(_Loader): + def __init__(self, symbols: Sequence[str], name: str) -> None: + self.symbols = symbols + self.name = name + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, 
str, LoadingOptions, Optional[str]) -> Any + if doc in self.symbols: + return doc + else: + raise ValidationException(f"Expected one of {self.symbols}") + + def __repr__(self): # type: () -> str + return self.name + + +class _SecondaryDSLLoader(_Loader): + def __init__(self, inner): + # type: (_Loader) -> None + self.inner = inner + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + r: List[Dict[str, Any]] = [] + if isinstance(doc, MutableSequence): + for d in doc: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict: Dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + "Missing pattern in secondaryFiles specification entry: {}".format( + d + ) + ) + new_dict["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None + ) + + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) + ) + r.append(new_dict) + + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." 
+ ) + elif isinstance(doc, MutableMapping): + new_dict = {} + doc_copy = copy.deepcopy(doc) + if "pattern" in doc_copy: + new_dict["pattern"] = doc_copy.pop("pattern") + else: + raise ValidationException( + "Missing pattern in secondaryFiles specification entry: {}".format( + doc + ) + ) + new_dict["required"] = ( + doc_copy.pop("required") if "required" in doc_copy else None + ) + + if len(doc_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + doc_copy + ) + ) + r.append(new_dict) + + elif isinstance(doc, str): + if doc.endswith("?"): + r.append({"pattern": doc[:-1], "required": False}) + else: + r.append({"pattern": doc}) + else: + raise ValidationException("Expected str or sequence of str") + return self.inner.load(r, baseuri, loadingOptions, docRoot) + + +class _RecordLoader(_Loader): + def __init__(self, classtype): + # type: (Type[Saveable]) -> None + self.classtype = classtype + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableMapping): + raise ValidationException(f"Expected a dict, was {type(doc)}") + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self): # type: () -> str + return str(self.classtype.__name__) + + +class _ExpressionLoader(_Loader): + def __init__(self, items: Type[str]) -> None: + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, str): + raise ValidationException(f"Expected a str, was {type(doc)}") + return doc + + +class _UnionLoader(_Loader): + def __init__(self, alternates: Sequence[_Loader]) -> None: + self.alternates = alternates + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + errors = [] + for t in self.alternates: + try: + return 
t.load(doc, baseuri, loadingOptions, docRoot=docRoot) + except ValidationException as e: + errors.append(ValidationException(f"tried {t} but", None, [e])) + raise ValidationException("", None, errors, "-") + + def __repr__(self): # type: () -> str + return " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__(self, inner, scoped_id, vocab_term, scoped_ref): + # type: (_Loader, bool, bool, Union[int, None]) -> None + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = scoped_ref + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + newdoc = [] + for i in doc: + if isinstance(i, str): + newdoc.append( + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + elif isinstance(doc, str): + doc = expand_url( + doc, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + return self.inner.load(doc, baseuri, loadingOptions) + + +class _TypeDSLLoader(_Loader): + typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") + + def __init__(self, inner, refScope): + # type: (_Loader, Union[int, None]) -> None + self.inner = inner + self.refScope = refScope + + def resolve( + self, + doc, # type: str + baseuri, # type: str + loadingOptions, # type: LoadingOptions + ): + # type: (...) 
-> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] + m = self.typeDSLregex.match(doc) + if m: + group1 = m.group(1) + assert group1 is not None # nosec + first = expand_url( + group1, baseuri, loadingOptions, False, True, self.refScope + ) + second = third = None + if bool(m.group(2)): + second = {"type": "array", "items": first} + # second = CommentedMap((("type", "array"), + # ("items", first))) + # second.lc.add_kv_line_col("type", lc) + # second.lc.add_kv_line_col("items", lc) + # second.lc.filename = filename + if bool(m.group(3)): + third = ["null", second or first] + # third = CommentedSeq(["null", second or first]) + # third.lc.add_kv_line_col(0, lc) + # third.lc.add_kv_line_col(1, lc) + # third.lc.filename = filename + return third or second or first + return doc + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + r = [] # type: List[Any] + for d in doc: + if isinstance(d, str): + resolved = self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, str): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions) + + +class _IdMapLoader(_Loader): + def __init__(self, inner, mapSubject, mapPredicate): + # type: (_Loader, str, Union[str, None]) -> None + self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableMapping): + r = [] # type: List[Any] + for k in sorted(doc.keys()): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = 
k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions) + + +def _document_load( + loader: _Loader, + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], + baseuri: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Tuple[Any, LoadingOptions]: + if isinstance(doc, str): + return _document_load_by_url( + loader, + loadingOptions.fetcher.urljoin(baseuri, doc), + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + if isinstance(doc, MutableMapping): + addl_metadata = {} + if addl_metadata_fields is not None: + for mf in addl_metadata_fields: + if mf in doc: + addl_metadata[mf] = doc[mf] + + docuri = baseuri + if "$base" in doc: + baseuri = doc["$base"] + + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, + namespaces=doc.get("$namespaces", None), + schemas=doc.get("$schemas", None), + baseuri=doc.get("$base", None), + addl_metadata=addl_metadata, + ) + + # doc = { + # k: v + # for k, v in doc.items() + # if k not in ("$namespaces", "$schemas", "$base") + # } + doc = copy.copy(doc) + if "$namespaces" in doc: + doc.pop("$namespaces") + if "$schemas" in doc: + doc.pop("$schemas") + if "$base" in doc: + doc.pop("$base") + + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc + + if "$graph" in doc: + loadingOptions.idx[baseuri] = ( + loader.load(doc["$graph"], baseuri, loadingOptions), + loadingOptions, + ) + else: + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), + loadingOptions, + ) + + if docuri != baseuri: + loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] + + return loadingOptions.idx[baseuri] 
+ if isinstance(doc, MutableSequence): + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions), + loadingOptions, + ) + return loadingOptions.idx[baseuri] + + raise ValidationException( + "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) + ) + + +def _document_load_by_url( + loader: _Loader, + url: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Tuple[Any, LoadingOptions]: + if url in loadingOptions.idx: + return loadingOptions.idx[url] + + doc_url, frg = urldefrag(url) + + text = loadingOptions.fetcher.fetch_text(doc_url) + if isinstance(text, bytes): + textIO = StringIO(text.decode("utf-8")) + else: + textIO = StringIO(text) + textIO.name = str(doc_url) + yaml = yaml_no_ts() + result = yaml.load(textIO) + add_lc_filename(result, doc_url) + + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) + + _document_load( + loader, + result, + doc_url, + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + return loadingOptions.idx[url] + + +def file_uri(path, split_frag=False): # type: (str, bool) -> str + if path.startswith("file://"): + return path + if split_frag: + pathsp = path.split("#", 2) + frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" + urlpath = pathname2url(str(pathsp[0])) + else: + urlpath = pathname2url(path) + frag = "" + if urlpath.startswith("//"): + return f"file:{urlpath}{frag}" + else: + return f"file://{urlpath}{frag}" + + +def prefix_url(url: str, namespaces: Dict[str, str]) -> str: + """Expand short forms into full URLs using the given namespace dictionary.""" + for k, v in namespaces.items(): + if url.startswith(v): + return k + ":" + url[len(v) :] + return url + + +def save_relative_uri( + uri: Any, + base_url: str, + scoped_id: bool, + ref_scope: Optional[int], + relative_uris: bool, +) -> Any: + """Convert any URI to a relative one, obeying the scoping rules.""" + if isinstance(uri, 
MutableSequence): + return [ + save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) + for u in uri + ] + elif isinstance(uri, str): + if not relative_uris or uri == base_url: + return uri + urisplit = urlsplit(uri) + basesplit = urlsplit(base_url) + if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: + if urisplit.path != basesplit.path: + p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) + if urisplit.fragment: + p = p + "#" + urisplit.fragment + return p + + basefrag = basesplit.fragment + "/" + if ref_scope: + sp = basefrag.split("/") + i = 0 + while i < ref_scope: + sp.pop() + i += 1 + basefrag = "/".join(sp) + + if urisplit.fragment.startswith(basefrag): + return urisplit.fragment[len(basefrag) :] + else: + return urisplit.fragment + return uri + else: + return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) + + +def shortname(inputid: str) -> str: + """ + Compute the shortname of a fully qualified identifier. + + See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. + """ + parsed_id = urlparse(inputid) + if parsed_id.fragment: + return parsed_id.fragment.split("/")[-1] + return parsed_id.path.split("/")[-1] + + +def parser_info() -> str: + return "org.w3id.cwl.v1_2" + + +class Documented(Saveable): + pass + + +class RecordField(Documented): + """ + A field of a record. 
+ """ + + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash((self.doc, self.name, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + 
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in 
self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["doc", "name", "type"]) + + +class RecordSchema(Saveable): + def __init__( + self, + type: Any, + 
fields: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordSchema): + return bool(self.fields == other.fields and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if 
_errors__: + raise ValidationException("Trying 'RecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type"]) + + +class EnumSchema(Saveable): + """ + Define an enumerated type. + + """ + + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) 
+ except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnumSchema'", None, _errors__) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, 
CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + 
base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type"]) + + +class ArraySchema(Saveable): + def __init__( + self, + items: Any, + type: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ArraySchema): + return bool(self.items == other.items and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + 
type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + 
getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type"]) + + +class File(Saveable): + """ + Represents a file (or group of files when `secondaryFiles` is provided) that + will be accessible by tools using standard POSIX file system call API such as + open(2) and read(2). + + Files are represented as objects with `class` of `File`. File objects have + a number of properties that provide metadata about the file. + + The `location` property of a File is a IRI that uniquely identifies the + file. Implementations must support the `file://` IRI scheme and may support + other schemes such as `http://` and `https://`. 
The value of `location` may also be a + relative reference, in which case it must be resolved relative to the IRI + of the document it appears in. Alternately to `location`, implementations + must also accept the `path` property on File, which must be a filesystem + path available on the same host as the CWL runner (for inputs) or the + runtime environment of a command line tool execution (for command line tool + outputs). + + If no `location` or `path` is specified, a file object must specify + `contents` with the UTF-8 text content of the file. This is a "file + literal". File literals do not correspond to external resources, but are + created on disk with `contents` when needed for executing a tool. + Where appropriate, expressions can return file literals to define new files + on a runtime. The maximum size of `contents` is 64 kilobytes. + + The `basename` property defines the filename on disk where the file is + staged. This may differ from the resource name. If not provided, + `basename` must be computed from the last path part of `location` and made + available to expressions. + + The `secondaryFiles` property is a list of File or Directory objects that + must be staged in the same directory as the primary file. It is an error + for file names to be duplicated in `secondaryFiles`. + + The `size` property is the size in bytes of the File. It must be computed + from the resource and made available to expressions. The `checksum` field + contains a cryptographic hash of the file content for use in verifying file + contents. Implementations may, at user option, enable or disable + computation of the `checksum` field for performance or other reasons. + However, the ability to compute output checksums is required to pass the + CWL conformance test suite. + + When executing a CommandLineTool, the files and secondary files may be + staged to an arbitrary directory, but must use the value of `basename` for + the filename. 
The `path` property must be the file path in the context of the + tool execution runtime (local to the compute node, or within the executing + container). All computed properties should be available to expressions. + File literals also must be staged and `path` must be set. + + When collecting CommandLineTool outputs, `glob` matching returns file paths + (with the `path` property) and the derived properties. This can all be + modified by `outputEval`. Alternately, if the file `cwl.output.json` is + present in the output, `outputBinding` is ignored. + + File objects in the output must provide either a `location` IRI or a `path` + property in the context of the tool execution runtime (local to the compute + node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via + `location` (the expression tool does not have access to files on disk so + `path` is meaningless) or as file literals. It is legal to return a file + object with an existing `location` but a different `basename`. The + `loadContents` field of ExpressionTool inputs behaves the same as on + CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. 
+ + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + dirname: Optional[Any] = None, + nameroot: Optional[Any] = None, + nameext: Optional[Any] = None, + checksum: Optional[Any] = None, + size: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + format: Optional[Any] = None, + contents: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, File): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum == other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, + self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "File": + 
_doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "File": + raise ValidationException("Not a File") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "dirname" in _doc: + try: + dirname = load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + ) + ) + else: + dirname = None + if "nameroot" in _doc: + try: + nameroot = load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], + ) + ) + else: + nameroot = None + if "nameext" in 
_doc: + try: + nameext = load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + ) + ) + else: + nameext = None + if "checksum" in _doc: + try: + checksum = load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + ) + ) + else: + checksum = None + if "size" in _doc: + try: + size = load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [e], + ) + ) + else: + size = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "contents" in _doc: + try: + contents = load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [e], + ) + ) + else: + contents = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'File'", None, _errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = 
self.extension_fields[ef] + + r["class"] = "File" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dirname is not None and "dirname" not in r: + r["dirname"] = save( + self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dirname", + val=r.get("dirname"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if 
self.nameroot is not None and "nameroot" not in r: + r["nameroot"] = save( + self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameroot", + val=r.get("nameroot"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.nameext is not None and "nameext" not in r: + r["nameext"] = save( + self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameext", + val=r.get("nameext"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.checksum is not None and "checksum" not in r: + r["checksum"] = save( + self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="checksum", + val=r.get("checksum"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.size is not None and "size" not in r: + r["size"] = save( + self.size, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="size", + val=r.get("size"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if 
self.contents is not None and "contents" not in r: + r["contents"] = save( + self.contents, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="contents", + val=r.get("contents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with `class` of `Directory`. Directory objects have + a number of properties that provide metadata about the directory. + + The `location` property of a Directory is a IRI that uniquely identifies + the directory. Implementations must support the file:// IRI scheme and may + support other schemes such as http://. Alternately to `location`, + implementations must also accept the `path` property on Directory, which + must be a filesystem path available on the same host as the CWL runner (for + inputs) or the runtime environment of a command line tool execution (for + command line tool outputs). + + A Directory object may have a `listing` field. This is a list of File and + Directory objects that are contained in the Directory. For each entry in + `listing`, the `basename` property defines the name of the File or + Subdirectory when staged to disk. If `listing` is not provided, the + implementation must have some way of fetching the Directory listing at + runtime based on the `location` field. + + If a Directory does not have `location`, it is a Directory literal. 
A + Directory literal must provide `listing`. Directory literals must be + created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied + relationship in their `location`. For example, a Directory listing may + contain two files located on different hosts. It is the responsibility of + the runtime to ensure that those files are staged to disk appropriately. + Secondary files associated with files in `listing` must also be staged to + the same Directory. + + When executing a CommandLineTool, Directories must be recursively staged + first and have local values of `path` assigned. + + Directory objects in CommandLineTool output must provide either a + `location` IRI or a `path` property in the context of the tool execution + runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + Name conflicts (the same `basename` appearing multiple times in `listing` + or in any entry in `secondaryFiles` in the listing) is a fatal error. 
+ + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + listing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "Directory" + self.location = location + self.path = path + self.basename = basename + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Directory): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.listing == other.listing + ) + return False + + def __hash__(self) -> int: + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Directory": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Directory": + raise ValidationException("Not a Directory") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "listing" in _doc: + try: + listing = load_field( + _doc.get("listing"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + else: + listing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Directory'", None, _errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < 
len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Directory" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + 
new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.listing is not None and "listing" not in r: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "location", "path", "basename", "listing"]) + + +class Labeled(Saveable): + pass + + +class Identified(Saveable): + pass + + +class LoadContents(Saveable): + pass + + +class FieldBase(Labeled): + pass + + +class InputFormat(Saveable): + pass + + +class OutputFormat(Saveable): + pass + + +class Parameter(FieldBase, Documented, Identified): + """ + Define an input or output parameter to a process. 
+ + """ + + pass + + +class InputBinding(Saveable): + def __init__( + self, + loadContents: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputBinding): + return bool(self.loadContents == other.loadContents) + return False + + def __hash__(self) -> int: + return hash((self.loadContents)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + 
def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["loadContents"]) + + +class IOSchema(Labeled, Documented): + pass + + +class InputSchema(IOSchema): + pass + + +class OutputSchema(IOSchema): + pass + + +class InputRecordField(RecordField, FieldBase, InputFormat, LoadContents): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + ) + ) + + @classmethod + def fromDoc( + cls, + 
class InputRecordField(RecordField, FieldBase, InputFormat, LoadContents):
    """Generated Saveable for the `InputRecordField` record type."""

    def __init__(
        self,
        name: Any,
        type: Any,
        doc: Optional[Any] = None,
        label: Optional[Any] = None,
        secondaryFiles: Optional[Any] = None,
        streamable: Optional[Any] = None,
        format: Optional[Any] = None,
        loadContents: Optional[Any] = None,
        loadListing: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh containers/options when none were supplied.
        self.extension_fields = (
            extension_fields if extension_fields else CommentedMap()
        )
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.doc = doc
        self.name = name
        self.type = type
        self.label = label
        self.secondaryFiles = secondaryFiles
        self.streamable = streamable
        self.format = format
        self.loadContents = loadContents
        self.loadListing = loadListing

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, InputRecordField):
            return False
        return bool(
            self.doc == other.doc
            and self.name == other.name
            and self.type == other.type
            and self.label == other.label
            and self.secondaryFiles == other.secondaryFiles
            and self.streamable == other.streamable
            and self.format == other.format
            and self.loadContents == other.loadContents
            and self.loadListing == other.loadListing
        )

    def __hash__(self) -> int:
        return hash(
            (
                self.doc,
                self.name,
                self.type,
                self.label,
                self.secondaryFiles,
                self.streamable,
                self.format,
                self.loadContents,
                self.loadListing,
            )
        )

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InputRecordField":
        """Build an InputRecordField from a parsed node, collecting field errors."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel line/column bookkeeping on the shallow copy.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        errors = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"), uri_strtype_True_False_None, baseuri, loadingOptions
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None
        original_name_is_none = name is None
        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                raise ValidationException("Missing name")
        if not original_name_is_none:
            # An explicit name rebases all child URIs.
            baseuri = name
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        try:
            type = load_field(
                _doc.get("type"),
                typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            errors.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "secondaryFiles" in _doc:
            try:
                secondaryFiles = load_field(
                    _doc.get("secondaryFiles"),
                    secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `secondaryFiles` field is not valid because:",
                        SourceLine(_doc, "secondaryFiles", str),
                        [e],
                    )
                )
        else:
            secondaryFiles = None
        if "streamable" in _doc:
            try:
                streamable = load_field(
                    _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, loadingOptions
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `streamable` field is not valid because:",
                        SourceLine(_doc, "streamable", str),
                        [e],
                    )
                )
        else:
            streamable = None
        if "format" in _doc:
            try:
                format = load_field(
                    _doc.get("format"),
                    uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `format` field is not valid because:",
                        SourceLine(_doc, "format", str),
                        [e],
                    )
                )
        else:
            format = None
        if "loadContents" in _doc:
            try:
                loadContents = load_field(
                    _doc.get("loadContents"), union_of_None_type_or_booltype, baseuri, loadingOptions
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `loadContents` field is not valid because:",
                        SourceLine(_doc, "loadContents", str),
                        [e],
                    )
                )
        else:
            loadContents = None
        if "loadListing" in _doc:
            try:
                loadListing = load_field(
                    _doc.get("loadListing"),
                    union_of_None_type_or_LoadListingEnumLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `loadListing` field is not valid because:",
                        SourceLine(_doc, "loadListing", str),
                        [e],
                    )
                )
        else:
            loadListing = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k in cls.attrs:
                continue
            if ":" in k:
                # Namespaced keys become extension fields under their expanded URL.
                ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False)
                extension_fields[ex] = _doc[k]
            else:
                errors.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format(
                            k
                        ),
                        SourceLine(_doc, k, str),
                    )
                )
                break
        if errors:
            raise ValidationException("Trying 'InputRecordField'", None, errors)
        _constructed = cls(
            doc=doc,
            name=name,
            type=type,
            label=label,
            secondaryFiles=secondaryFiles,
            streamable=streamable,
            format=format,
            loadContents=loadContents,
            loadListing=loadListing,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object under its resolved name.
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
    ) -> CommentedMap:
        """Serialize back to a CommentedMap, restoring source line/column info."""
        if keys is None:
            keys = []
        r = CommentedMap()
        # Walk the globally recorded source document down the key path so the
        # output can inherit the original node's line/column data.
        doc = copy.copy(doc_line_info)
        keys = copy.copy(keys)
        for key in keys:
            if isinstance(doc, CommentedMap):
                doc = doc.get(key)
            elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
                doc = doc[key] if key < len(doc) else None
            else:
                doc = None
                break
        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if doc:
            # First emit fields in their original document order.
            for key in doc.lc.data.keys():
                if (
                    isinstance(key, str)
                    and hasattr(self, key)
                    and getattr(self, key) is not None
                    and key != "class"
                ):
                    saved_val = save(
                        getattr(self, key),
                        top=False,
                        base_url=base_url,
                        relative_uris=relative_uris,
                        keys=keys + [key],
                    )
                    # Exact type check on purpose: CommentedSeq subclasses list
                    # and must not be unwrapped.
                    if type(saved_val) == list and len(saved_val) == 1:
                        saved_val = saved_val[0]
                    r[key] = saved_val
                    max_len = add_kv(
                        old_doc=doc, new_doc=r, line_numbers=line_numbers, key=key,
                        val=r.get(key), cols=cols, min_col=min_col, max_len=max_len,
                    )
        if self.name is not None and "name" not in r:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name",
                val=r.get("name"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc",
                val=r.get("doc"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type",
                val=r.get("type"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="label",
                val=r.get("label"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.secondaryFiles is not None and "secondaryFiles" not in r:
            r["secondaryFiles"] = save(
                self.secondaryFiles, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="secondaryFiles",
                val=r.get("secondaryFiles"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.streamable is not None and "streamable" not in r:
            r["streamable"] = save(
                self.streamable, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="streamable",
                val=r.get("streamable"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.format is not None and "format" not in r:
            u = save_relative_uri(self.format, str(self.name), True, None, relative_uris)
            r["format"] = u
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="format",
                val=r.get("format"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.loadContents is not None and "loadContents" not in r:
            r["loadContents"] = save(
                self.loadContents, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="loadContents",
                val=r.get("loadContents"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.loadListing is not None and "loadListing" not in r:
            r["loadListing"] = save(
                self.loadListing, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="loadListing",
                val=r.get("loadListing"), cols=cols, min_col=min_col, max_len=max_len,
            )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(
        [
            "doc",
            "name",
            "type",
            "label",
            "secondaryFiles",
            "streamable",
            "format",
            "loadContents",
            "loadListing",
        ]
    )
class InputRecordSchema(RecordSchema, InputSchema):
    """Generated Saveable for the `InputRecordSchema` record type."""

    def __init__(
        self,
        type: Any,
        fields: Optional[Any] = None,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        name: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh containers/options when none were supplied.
        self.extension_fields = (
            extension_fields if extension_fields else CommentedMap()
        )
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.fields = fields
        self.type = type
        self.label = label
        self.doc = doc
        self.name = name

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, InputRecordSchema):
            return False
        return bool(
            self.fields == other.fields
            and self.type == other.type
            and self.label == other.label
            and self.doc == other.doc
            and self.name == other.name
        )

    def __hash__(self) -> int:
        return hash((self.fields, self.type, self.label, self.doc, self.name))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InputRecordSchema":
        """Build an InputRecordSchema from a parsed node, collecting field errors."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel line/column bookkeeping on the shallow copy.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        errors = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None
        original_name_is_none = name is None
        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                # Anonymous schema: synthesize a blank-node identifier.
                name = "_:" + str(_uuid__.uuid4())
        if not original_name_is_none:
            baseuri = name
        if "fields" in _doc:
            try:
                fields = load_field(
                    _doc.get("fields"),
                    idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `fields` field is not valid because:",
                        SourceLine(_doc, "fields", str),
                        [e],
                    )
                )
        else:
            fields = None
        try:
            type = load_field(
                _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, loadingOptions
            )
        except ValidationException as e:
            errors.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k in cls.attrs:
                continue
            if ":" in k:
                # Namespaced keys become extension fields under their expanded URL.
                ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False)
                extension_fields[ex] = _doc[k]
            else:
                errors.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format(
                            k
                        ),
                        SourceLine(_doc, k, str),
                    )
                )
                break
        if errors:
            raise ValidationException("Trying 'InputRecordSchema'", None, errors)
        _constructed = cls(
            fields=fields,
            type=type,
            label=label,
            doc=doc,
            name=name,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object under its resolved name.
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
    ) -> CommentedMap:
        """Serialize back to a CommentedMap, restoring source line/column info."""
        if keys is None:
            keys = []
        r = CommentedMap()
        # Walk the globally recorded source document down the key path so the
        # output can inherit the original node's line/column data.
        doc = copy.copy(doc_line_info)
        keys = copy.copy(keys)
        for key in keys:
            if isinstance(doc, CommentedMap):
                doc = doc.get(key)
            elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
                doc = doc[key] if key < len(doc) else None
            else:
                doc = None
                break
        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if doc:
            # First emit fields in their original document order.
            for key in doc.lc.data.keys():
                if (
                    isinstance(key, str)
                    and hasattr(self, key)
                    and getattr(self, key) is not None
                    and key != "class"
                ):
                    saved_val = save(
                        getattr(self, key),
                        top=False,
                        base_url=base_url,
                        relative_uris=relative_uris,
                        keys=keys + [key],
                    )
                    # Exact type check on purpose: CommentedSeq subclasses list
                    # and must not be unwrapped.
                    if type(saved_val) == list and len(saved_val) == 1:
                        saved_val = saved_val[0]
                    r[key] = saved_val
                    max_len = add_kv(
                        old_doc=doc, new_doc=r, line_numbers=line_numbers, key=key,
                        val=r.get(key), cols=cols, min_col=min_col, max_len=max_len,
                    )
        if self.name is not None and "name" not in r:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name",
                val=r.get("name"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.fields is not None and "fields" not in r:
            r["fields"] = save(
                self.fields, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="fields",
                val=r.get("fields"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type",
                val=r.get("type"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="label",
                val=r.get("label"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc",
                val=r.get("doc"), cols=cols, min_col=min_col, max_len=max_len,
            )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["fields", "type", "label", "doc", "name"])
class InputEnumSchema(EnumSchema, InputSchema):
    """Generated Saveable for the `InputEnumSchema` record type."""

    def __init__(
        self,
        symbols: Any,
        type: Any,
        name: Optional[Any] = None,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh containers/options when none were supplied.
        self.extension_fields = (
            extension_fields if extension_fields else CommentedMap()
        )
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.name = name
        self.symbols = symbols
        self.type = type
        self.label = label
        self.doc = doc

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, InputEnumSchema):
            return False
        return bool(
            self.name == other.name
            and self.symbols == other.symbols
            and self.type == other.type
            and self.label == other.label
            and self.doc == other.doc
        )

    def __hash__(self) -> int:
        return hash((self.name, self.symbols, self.type, self.label, self.doc))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InputEnumSchema":
        """Build an InputEnumSchema from a parsed node, collecting field errors."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel line/column bookkeeping on the shallow copy.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        errors = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None
        original_name_is_none = name is None
        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                # Anonymous schema: synthesize a blank-node identifier.
                name = "_:" + str(_uuid__.uuid4())
        if not original_name_is_none:
            baseuri = name
        try:
            symbols = load_field(
                _doc.get("symbols"), uri_array_of_strtype_True_False_None, baseuri, loadingOptions
            )
        except ValidationException as e:
            errors.append(
                ValidationException(
                    "the `symbols` field is not valid because:",
                    SourceLine(_doc, "symbols", str),
                    [e],
                )
            )
        try:
            type = load_field(
                _doc.get("type"), typedsl_Enum_nameLoader_2, baseuri, loadingOptions
            )
        except ValidationException as e:
            errors.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k in cls.attrs:
                continue
            if ":" in k:
                # Namespaced keys become extension fields under their expanded URL.
                ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False)
                extension_fields[ex] = _doc[k]
            else:
                errors.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format(
                            k
                        ),
                        SourceLine(_doc, k, str),
                    )
                )
                break
        if errors:
            raise ValidationException("Trying 'InputEnumSchema'", None, errors)
        _constructed = cls(
            name=name,
            symbols=symbols,
            type=type,
            label=label,
            doc=doc,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object under its resolved name.
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
    ) -> CommentedMap:
        """Serialize back to a CommentedMap, restoring source line/column info."""
        if keys is None:
            keys = []
        r = CommentedMap()
        # Walk the globally recorded source document down the key path so the
        # output can inherit the original node's line/column data.
        doc = copy.copy(doc_line_info)
        keys = copy.copy(keys)
        for key in keys:
            if isinstance(doc, CommentedMap):
                doc = doc.get(key)
            elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
                doc = doc[key] if key < len(doc) else None
            else:
                doc = None
                break
        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if doc:
            # First emit fields in their original document order.
            for key in doc.lc.data.keys():
                if (
                    isinstance(key, str)
                    and hasattr(self, key)
                    and getattr(self, key) is not None
                    and key != "class"
                ):
                    saved_val = save(
                        getattr(self, key),
                        top=False,
                        base_url=base_url,
                        relative_uris=relative_uris,
                        keys=keys + [key],
                    )
                    # Exact type check on purpose: CommentedSeq subclasses list
                    # and must not be unwrapped.
                    if type(saved_val) == list and len(saved_val) == 1:
                        saved_val = saved_val[0]
                    r[key] = saved_val
                    max_len = add_kv(
                        old_doc=doc, new_doc=r, line_numbers=line_numbers, key=key,
                        val=r.get(key), cols=cols, min_col=min_col, max_len=max_len,
                    )
        if self.name is not None and "name" not in r:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name",
                val=r.get("name"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.symbols is not None and "symbols" not in r:
            u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris)
            r["symbols"] = u
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="symbols",
                val=r.get("symbols"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type",
                val=r.get("type"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="label",
                val=r.get("label"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc",
                val=r.get("doc"), cols=cols, min_col=min_col, max_len=max_len,
            )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["name", "symbols", "type", "label", "doc"])
class InputArraySchema(ArraySchema, InputSchema):
    """Generated Saveable for the `InputArraySchema` record type."""

    def __init__(
        self,
        items: Any,
        type: Any,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        name: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh containers/options when none were supplied.
        self.extension_fields = (
            extension_fields if extension_fields else CommentedMap()
        )
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.items = items
        self.type = type
        self.label = label
        self.doc = doc
        self.name = name

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, InputArraySchema):
            return False
        return bool(
            self.items == other.items
            and self.type == other.type
            and self.label == other.label
            and self.doc == other.doc
            and self.name == other.name
        )

    def __hash__(self) -> int:
        return hash((self.items, self.type, self.label, self.doc, self.name))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InputArraySchema":
        """Build an InputArraySchema from a parsed node, collecting field errors."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel line/column bookkeeping on the shallow copy.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        errors = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None
        original_name_is_none = name is None
        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                # Anonymous schema: synthesize a blank-node identifier.
                name = "_:" + str(_uuid__.uuid4())
        if not original_name_is_none:
            baseuri = name
        try:
            items = load_field(
                _doc.get("items"),
                uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            errors.append(
                ValidationException(
                    "the `items` field is not valid because:",
                    SourceLine(_doc, "items", str),
                    [e],
                )
            )
        try:
            type = load_field(
                _doc.get("type"), typedsl_Array_nameLoader_2, baseuri, loadingOptions
            )
        except ValidationException as e:
            errors.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                errors.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k in cls.attrs:
                continue
            if ":" in k:
                # Namespaced keys become extension fields under their expanded URL.
                ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False)
                extension_fields[ex] = _doc[k]
            else:
                errors.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format(
                            k
                        ),
                        SourceLine(_doc, k, str),
                    )
                )
                break
        if errors:
            raise ValidationException("Trying 'InputArraySchema'", None, errors)
        _constructed = cls(
            items=items,
            type=type,
            label=label,
            doc=doc,
            name=name,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object under its resolved name.
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
    ) -> CommentedMap:
        """Serialize back to a CommentedMap, restoring source line/column info."""
        if keys is None:
            keys = []
        r = CommentedMap()
        # Walk the globally recorded source document down the key path so the
        # output can inherit the original node's line/column data.
        doc = copy.copy(doc_line_info)
        keys = copy.copy(keys)
        for key in keys:
            if isinstance(doc, CommentedMap):
                doc = doc.get(key)
            elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
                doc = doc[key] if key < len(doc) else None
            else:
                doc = None
                break
        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if doc:
            # First emit fields in their original document order.
            for key in doc.lc.data.keys():
                if (
                    isinstance(key, str)
                    and hasattr(self, key)
                    and getattr(self, key) is not None
                    and key != "class"
                ):
                    saved_val = save(
                        getattr(self, key),
                        top=False,
                        base_url=base_url,
                        relative_uris=relative_uris,
                        keys=keys + [key],
                    )
                    # Exact type check on purpose: CommentedSeq subclasses list
                    # and must not be unwrapped.
                    if type(saved_val) == list and len(saved_val) == 1:
                        saved_val = saved_val[0]
                    r[key] = saved_val
                    max_len = add_kv(
                        old_doc=doc, new_doc=r, line_numbers=line_numbers, key=key,
                        val=r.get(key), cols=cols, min_col=min_col, max_len=max_len,
                    )
        if self.name is not None and "name" not in r:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="name",
                val=r.get("name"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.items is not None and "items" not in r:
            u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris)
            r["items"] = u
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="items",
                val=r.get("items"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type",
                val=r.get("type"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="label",
                val=r.get("label"), cols=cols, min_col=min_col, max_len=max_len,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc, top=False, base_url=str(self.name), relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc",
                val=r.get("doc"), cols=cols, min_col=min_col, max_len=max_len,
            )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["items", "type", "label", "doc", "name"])
+ ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + 
[e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, 
(CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + 
old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] + ) + + +class OutputRecordSchema(RecordSchema, OutputSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = 
None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> 
CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + 
val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class OutputEnumSchema(EnumSchema, OutputSchema): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + + def 
__eq__(self, other: Any) -> bool: + if isinstance(other, OutputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputEnumSchema'", None, _errors__) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: 
+ r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + 
key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class OutputArraySchema(ArraySchema, OutputSchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + 
_errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), 
+ [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # 
If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class InputParameter(Parameter, InputFormat, LoadContents): + pass + + +class OutputParameter(Parameter, OutputFormat): + pass + + +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled + before executing a process. See [`Process.hints`](#process) and + [`Process.requirements`](#process). + + Process requirements are the primary mechanism for specifying extensions to + the CWL core specification. + + """ + + pass + + +class Process(Identified, Labeled, Documented): + """ + + The base executable type in CWL is the `Process` object defined by the + document. Note that the `Process` object is abstract and cannot be + directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. + If this requirement is not present, the workflow platform must not perform expression + interpolation. 
+ + """ + + def __init__( + self, + expressionLib: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InlineJavascriptRequirement" + self.expressionLib = expressionLib + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InlineJavascriptRequirement): + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.expressionLib)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InlineJavascriptRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InlineJavascriptRequirement": + raise ValidationException("Not a InlineJavascriptRequirement") + + if "expressionLib" in _doc: + try: + expressionLib = load_field( + _doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), + [e], + ) + ) + else: + expressionLib = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + 
break + + if _errors__: + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) + _constructed = cls( + expressionLib=expressionLib, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InlineJavascriptRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.expressionLib is not None and "expressionLib" not in r: + r["expressionLib"] = save( + self.expressionLib, + 
top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expressionLib", + val=r.get("expressionLib"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "expressionLib"]) + + +class CommandInputSchema(Saveable): + pass + + +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when + interpreting the `inputs` and `outputs` fields. When a `type` field + contains a IRI, the implementation must check if the type is defined in + `schemaDefs` and use that definition. If the type is not found in + `schemaDefs`, it is an error. The entries in `schemaDefs` must be + processed in the order listed such that later schema definitions may refer + to earlier schema definitions. + + - **Type definitions are allowed for `enum` and `record` types only.** + - Type definitions may be shared by defining them in a file and then + `$include`-ing them in the `types` field. 
+ - A file can contain a list of type definitions + + """ + + def __init__( + self, + types: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SchemaDefRequirement" + self.types = types + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.types)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SchemaDefRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SchemaDefRequirement": + raise ValidationException("Not a SchemaDefRequirement") + + try: + types = load_field( + _doc.get("types"), + array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SchemaDefRequirement'", 
None, _errors__) + _constructed = cls( + types=types, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SchemaDefRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.types is not None and "types" not in r: + r["types"] = save( + self.types, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="types", + 
val=r.get("types"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "types"]) + + +class SecondaryFileSchema(Saveable): + """ + Secondary files are specified using the following micro-DSL for secondary files: + + * If the value is a string, it is transformed to an object with two fields + `pattern` and `required` + * By default, the value of `required` is `null` + (this indicates default behavior, which may be based on the context) + * If the value ends with a question mark `?` the question mark is + stripped off and the value of the field `required` is set to `False` + * The remaining value is assigned to the field `pattern` + + For implementation details and examples, please see + [this section](SchemaSalad.html#Domain_Specific_Language_for_secondary_files) + in the Schema Salad specification. 
+ + """ + + def __init__( + self, + pattern: Any, + required: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.pattern = pattern + self.required = required + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SecondaryFileSchema): + return bool( + self.pattern == other.pattern and self.required == other.required + ) + return False + + def __hash__(self) -> int: + return hash((self.pattern, self.required)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SecondaryFileSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + pattern = load_field( + _doc.get("pattern"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `pattern` field is not valid because:", + SourceLine(_doc, "pattern", str), + [e], + ) + ) + if "required" in _doc: + try: + required = load_field( + _doc.get("required"), + union_of_None_type_or_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `required` field is not valid because:", + SourceLine(_doc, "required", str), + [e], + ) + ) + else: + required = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid 
field `{}`, expected one of: `pattern`, `required`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SecondaryFileSchema'", None, _errors__) + _constructed = cls( + pattern=pattern, + required=required, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.pattern is not None and "pattern" not in r: + r["pattern"] 
= save( + self.pattern, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pattern", + val=r.get("pattern"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.required is not None and "required" not in r: + r["required"] = save( + self.required, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="required", + val=r.get("required"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["pattern", "required"]) + + +class LoadListingRequirement(ProcessRequirement): + """ + Specify the desired behavior for loading the `listing` field of + a Directory object for use by expressions. 
+ + """ + + def __init__( + self, + loadListing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "LoadListingRequirement" + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, LoadListingRequirement): + return bool( + self.class_ == other.class_ and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.loadListing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "LoadListingRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "LoadListingRequirement": + raise ValidationException("Not a LoadListingRequirement") + + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise 
ValidationException( + "Trying 'LoadListingRequirement'", None, _errors__ + ) + _constructed = cls( + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "LoadListingRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "loadListing"]) + + +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment + by the workflow platform when executing the command line tool. May be the + result of executing an expression, such as getting a parameter from input. + + """ + + def __init__( + self, + envName: Any, + envValue: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.envName = envName + self.envValue = envValue + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvironmentDef): + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) + return False + + def __hash__(self) -> int: + return hash((self.envName, self.envValue)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvironmentDef": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + envName = load_field( + _doc.get("envName"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `envName` field is not valid 
because:", + SourceLine(_doc, "envName", str), + [e], + ) + ) + try: + envValue = load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvironmentDef'", None, _errors__) + _constructed = cls( + envName=envName, + envValue=envValue, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, 
str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.envName is not None and "envName" not in r: + r["envName"] = save( + self.envName, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envName", + val=r.get("envName"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.envValue is not None and "envValue" not in r: + r["envValue"] = save( + self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envValue", + val=r.get("envValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["envName", "envValue"]) + + +class CommandLineBinding(InputBinding): + """ + + When listed under `inputBinding` in the input schema, the term + "value" refers to the corresponding value in the input object. For + binding objects listed in `CommandLineTool.arguments`, the term "value" + refers to the effective value after evaluating `valueFrom`. + + The binding behavior when building the command line depends on the data + type of the value. 
If there is a mismatch between the type described by + the input schema and the effective value, such as resulting from an + expression evaluation, an implementation must use the data type of the + effective value. + + - **string**: Add `prefix` and the string to the command line. + + - **number**: Add `prefix` and decimal representation to command line. + + - **boolean**: If true, add `prefix` to the command line. If false, add + nothing. + + - **File**: Add `prefix` and the value of + [`File.path`](#File) to the command line. + + - **Directory**: Add `prefix` and the value of + [`Directory.path`](#Directory) to the command line. + + - **array**: If `itemSeparator` is specified, add `prefix` and the join + the array into a single string with `itemSeparator` separating the + items. Otherwise, first add `prefix`, then recursively process + individual elements. + If the array is empty, it does not add anything to command line. + + - **object**: Add `prefix` only, and recursively add object fields for + which `inputBinding` is specified. + + - **null**: Add nothing. 
+ + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + position: Optional[Any] = None, + prefix: Optional[Any] = None, + separate: Optional[Any] = None, + itemSeparator: Optional[Any] = None, + valueFrom: Optional[Any] = None, + shellQuote: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBinding): + return bool( + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix + and self.separate == other.separate + and self.itemSeparator == other.itemSeparator + and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid 
because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "position" in _doc: + try: + position = load_field( + _doc.get("position"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), + [e], + ) + ) + else: + position = None + if "prefix" in _doc: + try: + prefix = load_field( + _doc.get("prefix"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), + [e], + ) + ) + else: + prefix = None + if "separate" in _doc: + try: + separate = load_field( + _doc.get("separate"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), + [e], + ) + ) + else: + separate = None + if "itemSeparator" in _doc: + try: + itemSeparator = load_field( + _doc.get("itemSeparator"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [e], + ) + ) + else: + itemSeparator = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + if "shellQuote" in _doc: + try: + shellQuote = load_field( + 
_doc.get("shellQuote"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [e], + ) + ) + else: + shellQuote = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for 
ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.position is not None and "position" not in r: + r["position"] = save( + self.position, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="position", + val=r.get("position"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.prefix is not None and "prefix" not in r: + r["prefix"] = save( + self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="prefix", + val=r.get("prefix"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.separate is not None and "separate" not in r: + r["separate"] = save( + self.separate, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="separate", + 
class CommandOutputBinding(LoadContents):
    """
    Describes how to generate an output parameter based on the files produced
    by a CommandLineTool.

    The output parameter value is generated by applying these operations in the
    following order:

      - glob
      - loadContents
      - outputEval
      - secondaryFiles

    """

    def __init__(
        self,
        loadContents: Optional[Any] = None,
        loadListing: Optional[Any] = None,
        glob: Optional[Any] = None,
        outputEval: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.loadContents = loadContents
        self.loadListing = loadListing
        self.glob = glob
        self.outputEval = outputEval

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, CommandOutputBinding):
            return bool(
                self.loadContents == other.loadContents
                and self.loadListing == other.loadListing
                and self.glob == other.glob
                and self.outputEval == other.outputEval
            )
        return False

    def __hash__(self) -> int:
        return hash((self.loadContents, self.loadListing, self.glob, self.outputEval))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "CommandOutputBinding":
        """Construct a CommandOutputBinding from a parsed document node.

        Collects per-field errors and raises a single ValidationException
        wrapping them all.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # carry the original line/column bookkeeping over to the copy
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "loadContents" in _doc:
            try:
                loadContents = load_field(
                    _doc.get("loadContents"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `loadContents` field is not valid because:",
                        SourceLine(_doc, "loadContents", str),
                        [e],
                    )
                )
        else:
            loadContents = None
        if "loadListing" in _doc:
            try:
                loadListing = load_field(
                    _doc.get("loadListing"),
                    union_of_None_type_or_LoadListingEnumLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `loadListing` field is not valid because:",
                        SourceLine(_doc, "loadListing", str),
                        [e],
                    )
                )
        else:
            loadListing = None
        if "glob" in _doc:
            try:
                glob = load_field(
                    _doc.get("glob"),
                    union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `glob` field is not valid because:",
                        SourceLine(_doc, "glob", str),
                        [e],
                    )
                )
        else:
            glob = None
        if "outputEval" in _doc:
            try:
                outputEval = load_field(
                    _doc.get("outputEval"),
                    union_of_None_type_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `outputEval` field is not valid because:",
                        SourceLine(_doc, "outputEval", str),
                        [e],
                    )
                )
        else:
            outputEval = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'CommandOutputBinding'", None, _errors__)
        _constructed = cls(
            loadContents=loadContents,
            loadListing=loadListing,
            glob=glob,
            outputEval=outputEval,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
    ) -> CommentedMap:
        """Serialize this object to a CommentedMap, preserving the source
        document's line/column info and field order where possible (``keys``
        is the path from the document root to this node)."""
        if keys is None:
            keys = []
        r = CommentedMap()
        doc = copy.copy(doc_line_info)
        keys = copy.copy(keys)

        for key in keys:
            if isinstance(doc, CommentedMap):
                doc = doc.get(key)
            elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
                if key < len(doc):
                    doc = doc[key]
                else:
                    doc = None
            else:
                doc = None
                break

        # FIX: the key path may land on a scalar, which has no ``.lc``;
        # previously this raised AttributeError on ``doc.lc`` below.
        if not isinstance(doc, (CommentedMap, CommentedSeq)):
            doc = None

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        if doc:
            # Original-document fields first, in original order.
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                )

                                # If the returned value is a list of size 1, just
                                # save the value in the list.  ``type()`` is
                                # deliberate: CommentedSeq subclasses list.
                                if type(saved_val) == list:
                                    if len(saved_val) == 1:
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                )
        if self.loadContents is not None and "loadContents" not in r:
            r["loadContents"] = save(
                self.loadContents,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="loadContents",
                val=r.get("loadContents"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.loadListing is not None and "loadListing" not in r:
            r["loadListing"] = save(
                self.loadListing,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="loadListing",
                val=r.get("loadListing"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.glob is not None and "glob" not in r:
            r["glob"] = save(
                self.glob, top=False, base_url=base_url, relative_uris=relative_uris
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="glob",
                val=r.get("glob"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.outputEval is not None and "outputEval" not in r:
            r["outputEval"] = save(
                self.outputEval,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="outputEval",
                val=r.get("outputEval"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"])
class CommandLineBindable(Saveable):
    """Base for types that may carry an ``inputBinding`` field."""

    def __init__(
        self,
        inputBinding: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.inputBinding = inputBinding

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, CommandLineBindable):
            return bool(self.inputBinding == other.inputBinding)
        return False

    def __hash__(self) -> int:
        # FIX: was ``hash((self.inputBinding))`` — ``(x)`` is not a tuple, so
        # the bare value was hashed.  Use a one-element tuple for consistency
        # with the other generated classes (__eq__/__hash__ stay consistent).
        return hash((self.inputBinding,))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "CommandLineBindable":
        """Construct a CommandLineBindable from a parsed document node.

        Collects per-field errors and raises one wrapping ValidationException.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # carry the original line/column bookkeeping over to the copy
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "inputBinding" in _doc:
            try:
                inputBinding = load_field(
                    _doc.get("inputBinding"),
                    union_of_None_type_or_CommandLineBindingLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `inputBinding` field is not valid because:",
                        SourceLine(_doc, "inputBinding", str),
                        [e],
                    )
                )
        else:
            inputBinding = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `inputBinding`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'CommandLineBindable'", None, _errors__)
        _constructed = cls(
            inputBinding=inputBinding,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
    ) -> CommentedMap:
        """Serialize this object to a CommentedMap, preserving the source
        document's line/column info where possible (``keys`` is the path from
        the document root to this node)."""
        if keys is None:
            keys = []
        r = CommentedMap()
        doc = copy.copy(doc_line_info)
        keys = copy.copy(keys)

        for key in keys:
            if isinstance(doc, CommentedMap):
                doc = doc.get(key)
            elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
                if key < len(doc):
                    doc = doc[key]
                else:
                    doc = None
            else:
                doc = None
                break

        # FIX: the key path may land on a scalar, which has no ``.lc``;
        # previously this raised AttributeError on ``doc.lc`` below.
        if not isinstance(doc, (CommentedMap, CommentedSeq)):
            doc = None

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        if doc:
            # Original-document fields first, in original order.
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                )

                                # If the returned value is a list of size 1, just
                                # save the value in the list.  ``type()`` is
                                # deliberate: CommentedSeq subclasses list.
                                if type(saved_val) == list:
                                    if len(saved_val) == 1:
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                )
        if self.inputBinding is not None and "inputBinding" not in r:
            r["inputBinding"] = save(
                self.inputBinding,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="inputBinding",
                val=r.get("inputBinding"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["inputBinding"])
class CommandInputRecordField(InputRecordField, CommandLineBindable):
    """Generated class for the CommandInputRecordField record type."""

    def __init__(
        self,
        name: Any,
        type: Any,
        doc: Optional[Any] = None,
        label: Optional[Any] = None,
        secondaryFiles: Optional[Any] = None,
        streamable: Optional[Any] = None,
        format: Optional[Any] = None,
        loadContents: Optional[Any] = None,
        loadListing: Optional[Any] = None,
        inputBinding: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.name = name
        self.type = type
        self.label = label
        self.secondaryFiles = secondaryFiles
        self.streamable = streamable
        self.format = format
        self.loadContents = loadContents
        self.loadListing = loadListing
        self.inputBinding = inputBinding

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, CommandInputRecordField):
            return bool(
                self.doc == other.doc
                and self.name == other.name
                and self.type == other.type
                and self.label == other.label
                and self.secondaryFiles == other.secondaryFiles
                and self.streamable == other.streamable
                and self.format == other.format
                and self.loadContents == other.loadContents
                and self.loadListing == other.loadListing
                and self.inputBinding == other.inputBinding
            )
        return False

    def __hash__(self) -> int:
        return hash(
            (
                self.doc,
                self.name,
                self.type,
                self.label,
                self.secondaryFiles,
                self.streamable,
                self.format,
                self.loadContents,
                self.loadListing,
                self.inputBinding,
            )
        )

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "CommandInputRecordField":
        """Construct a CommandInputRecordField from a parsed document node.

        ``name`` is required (``docRoot`` is used as a fallback); once known
        it becomes the base URI for resolving the remaining fields.  Collects
        per-field errors and raises one wrapping ValidationException.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # carry the original line/column bookkeeping over to the copy
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None

        __original_name_is_none = name is None
        if name is None:
            if docRoot is not None:
                name = docRoot
            else:
                raise ValidationException("Missing name")
        if not __original_name_is_none:
            baseuri = name
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        try:
            type = load_field(
                _doc.get("type"),
                typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "secondaryFiles" in _doc:
            try:
                secondaryFiles = load_field(
                    _doc.get("secondaryFiles"),
                    secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `secondaryFiles` field is not valid because:",
                        SourceLine(_doc, "secondaryFiles", str),
                        [e],
                    )
                )
        else:
            secondaryFiles = None
        if "streamable" in _doc:
            try:
                streamable = load_field(
                    _doc.get("streamable"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `streamable` field is not valid because:",
                        SourceLine(_doc, "streamable", str),
                        [e],
                    )
                )
        else:
            streamable = None
        if "format" in _doc:
            try:
                format = load_field(
                    _doc.get("format"),
                    uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `format` field is not valid because:",
                        SourceLine(_doc, "format", str),
                        [e],
                    )
                )
        else:
            format = None
        if "loadContents" in _doc:
            try:
                loadContents = load_field(
                    _doc.get("loadContents"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `loadContents` field is not valid because:",
                        SourceLine(_doc, "loadContents", str),
                        [e],
                    )
                )
        else:
            loadContents = None
        if "loadListing" in _doc:
            try:
                loadListing = load_field(
                    _doc.get("loadListing"),
                    union_of_None_type_or_LoadListingEnumLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `loadListing` field is not valid because:",
                        SourceLine(_doc, "loadListing", str),
                        [e],
                    )
                )
        else:
            loadListing = None
        if "inputBinding" in _doc:
            try:
                inputBinding = load_field(
                    _doc.get("inputBinding"),
                    union_of_None_type_or_CommandLineBindingLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `inputBinding` field is not valid because:",
                        SourceLine(_doc, "inputBinding", str),
                        [e],
                    )
                )
        else:
            inputBinding = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException(
                "Trying 'CommandInputRecordField'", None, _errors__
            )
        _constructed = cls(
            doc=doc,
            name=name,
            type=type,
            label=label,
            secondaryFiles=secondaryFiles,
            streamable=streamable,
            format=format,
            loadContents=loadContents,
            loadListing=loadListing,
            inputBinding=inputBinding,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
    ) -> CommentedMap:
        """Serialize this object to a CommentedMap, preserving the source
        document's line/column info and field order where possible (``keys``
        is the path from the document root to this node).  ``name`` and
        ``format`` are re-relativized as URIs; child fields are saved with
        this record's name as their base URL."""
        if keys is None:
            keys = []
        r = CommentedMap()
        doc = copy.copy(doc_line_info)
        keys = copy.copy(keys)

        for key in keys:
            if isinstance(doc, CommentedMap):
                doc = doc.get(key)
            elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
                if key < len(doc):
                    doc = doc[key]
                else:
                    doc = None
            else:
                doc = None
                break

        # FIX: the key path may land on a scalar, which has no ``.lc``;
        # previously this raised AttributeError on ``doc.lc`` below.
        if not isinstance(doc, (CommentedMap, CommentedSeq)):
            doc = None

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        if doc:
            # Original-document fields first, in original order.
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                )

                                # If the returned value is a list of size 1, just
                                # save the value in the list.  ``type()`` is
                                # deliberate: CommentedSeq subclasses list.
                                if type(saved_val) == list:
                                    if len(saved_val) == 1:
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                )
        if self.name is not None and "name" not in r:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="name",
                val=r.get("name"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="doc",
                val=r.get("doc"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="type",
                val=r.get("type"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="label",
                val=r.get("label"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.secondaryFiles is not None and "secondaryFiles" not in r:
            r["secondaryFiles"] = save(
                self.secondaryFiles,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="secondaryFiles",
                val=r.get("secondaryFiles"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.streamable is not None and "streamable" not in r:
            r["streamable"] = save(
                self.streamable,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="streamable",
                val=r.get("streamable"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.format is not None and "format" not in r:
            u = save_relative_uri(
                self.format, str(self.name), True, None, relative_uris
            )
            r["format"] = u
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="format",
                val=r.get("format"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.loadContents is not None and "loadContents" not in r:
            r["loadContents"] = save(
                self.loadContents,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="loadContents",
                val=r.get("loadContents"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.loadListing is not None and "loadListing" not in r:
            r["loadListing"] = save(
                self.loadListing,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="loadListing",
                val=r.get("loadListing"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )
        if self.inputBinding is not None and "inputBinding" not in r:
            r["inputBinding"] = save(
                self.inputBinding,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
            )
            max_len = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="inputBinding",
                val=r.get("inputBinding"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(
        [
            "doc",
            "name",
            "type",
            "label",
            "secondaryFiles",
            "streamable",
            "format",
            "loadContents",
            "loadListing",
            "inputBinding",
        ]
    )
def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + 
try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, 
expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, 
+ key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + 
val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.name, + self.symbols, + self.type, + self.label, + self.doc, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + 
uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 
`inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ + ) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + 
if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc", "inputBinding"]) + + +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable +): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + (self.items, self.type, self.label, self.doc, self.name, self.inputBinding) + ) 
+ + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: 
+ label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = 
doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + 
val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandOutputRecordField(OutputRecordField): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = 
loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + 
SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + 
r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label 
is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs 
= frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "outputBinding", + ] + ) + + +class CommandOutputRecordSchema(OutputRecordSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not 
__original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class CommandOutputEnumSchema(OutputEnumSchema): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = 
extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + 
"the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputEnumSchema'", None, _errors__ + ) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = 
None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + 
val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class CommandOutputArraySchema(OutputArraySchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, 
self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + 
SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = 
self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == 
other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) 
+ else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandInputParameter'", None, _errors__) + _constructed = cls( + label=label, + 
secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + 
base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + 
relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class CommandOutputParameter(OutputParameter): + """ + An output parameter for a CommandLineTool. + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> 
int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", 
loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = 
self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + 
relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "type", + "outputBinding", + ] + ) + + +class CommandLineTool(Process): + """ + This defines the schema of the CWL Command Line Tool 
Description document. + + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + baseCommand: Optional[Any] = None, + arguments: Optional[Any] = None, + stdin: Optional[Any] = None, + stderr: Optional[Any] = None, + stdout: Optional[Any] = None, + successCodes: Optional[Any] = None, + temporaryFailCodes: Optional[Any] = None, + permanentFailCodes: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.baseCommand == other.baseCommand + and self.arguments == other.arguments + and self.stdin == 
other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "CommandLineTool": + raise ValidationException("Not a CommandLineTool") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + 
baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + if "baseCommand" in _doc: + try: + baseCommand = load_field( + _doc.get("baseCommand"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), + [e], + ) + ) + else: + baseCommand = None + if "arguments" in _doc: + 
try: + arguments = load_field( + _doc.get("arguments"), + union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), + [e], + ) + ) + else: + arguments = None + if "stdin" in _doc: + try: + stdin = load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), + [e], + ) + ) + else: + stdin = None + if "stderr" in _doc: + try: + stderr = load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), + [e], + ) + ) + else: + stderr = None + if "stdout" in _doc: + try: + stdout = load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), + [e], + ) + ) + else: + stdout = None + if "successCodes" in _doc: + try: + successCodes = load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), + [e], + ) + ) + else: + successCodes = None + if "temporaryFailCodes" in _doc: + try: + temporaryFailCodes = load_field( + _doc.get("temporaryFailCodes"), + 
union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), + [e], + ) + ) + else: + temporaryFailCodes = None + if "permanentFailCodes" in _doc: + try: + permanentFailCodes = load_field( + _doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [e], + ) + ) + else: + permanentFailCodes = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineTool'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + 
base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "CommandLineTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + 
val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.baseCommand is not None and "baseCommand" not in r: + r["baseCommand"] = save( + self.baseCommand, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="baseCommand", + val=r.get("baseCommand"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.arguments is not None and "arguments" not in r: + r["arguments"] = save( + self.arguments, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="arguments", + val=r.get("arguments"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stdin is not None and "stdin" not in r: + r["stdin"] = save( + self.stdin, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdin", 
+ val=r.get("stdin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stderr is not None and "stderr" not in r: + r["stderr"] = save( + self.stderr, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stderr", + val=r.get("stderr"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.stdout is not None and "stdout" not in r: + r["stdout"] = save( + self.stdout, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdout", + val=r.get("stdout"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.successCodes is not None and "successCodes" not in r: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="successCodes", + val=r.get("successCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: + r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="temporaryFailCodes", + val=r.get("temporaryFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.permanentFailCodes is not None and "permanentFailCodes" not in r: + r["permanentFailCodes"] = save( + self.permanentFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="permanentFailCodes", + val=r.get("permanentFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", + ] + ) + + +class DockerRequirement(ProcessRequirement): + """ + Indicates that a workflow component should be run in a + [Docker](https://docker.com) or Docker-compatible (such as + [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and + specifies how to fetch or build the image. + + If a CommandLineTool lists `DockerRequirement` under + `hints` (or `requirements`), it may (or must) be run in the specified Docker + container. + + The platform must first acquire or install the correct Docker image as + specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. + + The platform must execute the tool in the container using `docker run` with + the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output + directory through the use of volume bind mounts. The platform should rewrite + file paths in the input object to correspond to the Docker bind mounted + locations. That is, the platform should rewrite values in the parameter context + such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths + within the container. The platform must ensure that `runtime.outdir` and + `runtime.tmpdir` are distinct directories. 
+ + When running a tool contained in Docker, the workflow platform must not + assume anything about the contents of the Docker container, such as the + presence or absence of specific software, except to assume that the + generated command line represents a valid command within the runtime + environment of the container. + + A container image may specify an + [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) + and/or + [CMD](https://docs.docker.com/engine/reference/builder/#cmd). + Command line arguments will be appended after all elements of + ENTRYPOINT, and will override all elements specified using CMD (in + other words, CMD is only used when the CommandLineTool definition + produces an empty command line). + + Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility + concerns of the implicit hidden execution point (For further discussion, see + [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). Portable + CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. + CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the + `requirements` section. + + ## Interaction with other requirements + + If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a + DockerRequirement, the environment variables must be provided to Docker + using `--env` or `--env-file` and interact with the container's preexisting + environment as defined by Docker. 
+ + """ + + def __init__( + self, + dockerPull: Optional[Any] = None, + dockerLoad: Optional[Any] = None, + dockerFile: Optional[Any] = None, + dockerImport: Optional[Any] = None, + dockerImageId: Optional[Any] = None, + dockerOutputDirectory: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "DockerRequirement" + self.dockerPull = dockerPull + self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DockerRequirement): + return bool( + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + self.dockerOutputDirectory, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "DockerRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "DockerRequirement": + raise ValidationException("Not a DockerRequirement") + + if "dockerPull" in _doc: + try: + dockerPull = load_field( + _doc.get("dockerPull"), + 
union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), + [e], + ) + ) + else: + dockerPull = None + if "dockerLoad" in _doc: + try: + dockerLoad = load_field( + _doc.get("dockerLoad"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), + [e], + ) + ) + else: + dockerLoad = None + if "dockerFile" in _doc: + try: + dockerFile = load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), + [e], + ) + ) + else: + dockerFile = None + if "dockerImport" in _doc: + try: + dockerImport = load_field( + _doc.get("dockerImport"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), + [e], + ) + ) + else: + dockerImport = None + if "dockerImageId" in _doc: + try: + dockerImageId = load_field( + _doc.get("dockerImageId"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), + [e], + ) + ) + else: + dockerImageId = None + if "dockerOutputDirectory" in _doc: + try: + dockerOutputDirectory = load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), + [e], + ) + ) + else: + dockerOutputDirectory = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'DockerRequirement'", None, _errors__) + _constructed = cls( + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = 
"DockerRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.dockerPull is not None and "dockerPull" not in r: + r["dockerPull"] = save( + self.dockerPull, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerPull", + val=r.get("dockerPull"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerLoad is not None and "dockerLoad" not in r: + r["dockerLoad"] = save( + self.dockerLoad, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerLoad", + val=r.get("dockerLoad"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerFile is not None and "dockerFile" not in r: + r["dockerFile"] = save( + self.dockerFile, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerFile", + val=r.get("dockerFile"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerImport is not None and "dockerImport" not in r: + r["dockerImport"] = save( + self.dockerImport, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + 
key="dockerImport", + val=r.get("dockerImport"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerImageId is not None and "dockerImageId" not in r: + r["dockerImageId"] = save( + self.dockerImageId, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImageId", + val=r.get("dockerImageId"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: + r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerOutputDirectory", + val=r.get("dockerOutputDirectory"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", + ] + ) + + +class SoftwareRequirement(ProcessRequirement): + """ + A list of software packages that should be configured in the environment of + the defined process. 
+ + """ + + def __init__( + self, + packages: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SoftwareRequirement" + self.packages = packages + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.packages)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwareRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SoftwareRequirement": + raise ValidationException("Not a SoftwareRequirement") + + try: + packages = load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwareRequirement'", None, _errors__) + _constructed = cls( + packages=packages, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SoftwareRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.packages is not None and "packages" not in r: + r["packages"] = save( + self.packages, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="packages", + val=r.get("packages"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Optional[Any] = None, + specs: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwarePackage": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + package = load_field( + _doc.get("package"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), + [e], + ) + ) + if "version" in _doc: + try: + version = load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `version` field is 
not valid because:", + SourceLine(_doc, "version", str), + [e], + ) + ) + else: + version = None + if "specs" in _doc: + try: + specs = load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), + [e], + ) + ) + else: + specs = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwarePackage'", None, _errors__) + _constructed = cls( + package=package, + version=version, + specs=specs, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in 
self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.package is not None and "package" not in r: + r["package"] = save( + self.package, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="package", + val=r.get("package"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.version is not None and "version" not in r: + r["version"] = save( + self.version, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="version", + val=r.get("version"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.specs is not None and "specs" not in r: + u = save_relative_uri(self.specs, base_url, False, None, relative_uris) + r["specs"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="specs", + val=r.get("specs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["package", "version", "specs"]) + + +class Dirent(Saveable): + """ + Define a 
file or subdirectory that must be staged to a particular + place prior to executing the command line tool. May be the result + of executing an expression, such as building a configuration file + from a template. + + Usually files are staged within the [designated output directory](#Runtime_environment). + However, under certain circumstances, files may be staged at + arbitrary locations, see discussion for `entryname`. + + """ + + def __init__( + self, + entry: Any, + entryname: Optional[Any] = None, + writable: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.entryname = entryname + self.entry = entry + self.writable = writable + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) + return False + + def __hash__(self) -> int: + return hash((self.entryname, self.entry, self.writable)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Dirent": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "entryname" in _doc: + try: + entryname = load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), + [e], + ) + ) + else: + entryname = None + try: + entry = load_field( + _doc.get("entry"), + 
union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), + [e], + ) + ) + if "writable" in _doc: + try: + writable = load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), + [e], + ) + ) + else: + writable = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Dirent'", None, _errors__) + _constructed = cls( + entryname=entryname, + entry=entry, + writable=writable, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef 
in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.entryname is not None and "entryname" not in r: + r["entryname"] = save( + self.entryname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entryname", + val=r.get("entryname"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.entry is not None and "entry" not in r: + r["entry"] = save( + self.entry, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entry", + val=r.get("entry"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.writable is not None and "writable" not in r: + r["writable"] = save( + self.writable, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="writable", + val=r.get("writable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + 
r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["entryname", "entry", "writable"]) + + +class InitialWorkDirRequirement(ProcessRequirement): + """ + Define a list of files and subdirectories that must be staged by the workflow platform prior to executing the command line tool. + Normally files are staged within the designated output directory. However, when running inside containers, files may be staged at arbitrary locations, see discussion for [`Dirent.entryname`](#Dirent). Together with `DockerRequirement.dockerOutputDirectory` it is possible to control the locations of both input and output files when running in containers. + """ + + def __init__( + self, + listing: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InitialWorkDirRequirement" + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.listing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InitialWorkDirRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InitialWorkDirRequirement": + raise ValidationException("Not a InitialWorkDirRequirement") + + try: + listing = load_field( + _doc.get("listing"), + 
union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) + _constructed = cls( + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InitialWorkDirRequirement" + + if doc: 
+ for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.listing is not None and "listing" not in r: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "listing"]) + + +class EnvVarRequirement(ProcessRequirement): + """ + Define a list of environment variables which will be set in the + execution environment of the tool. See `EnvironmentDef` for details. 
+ + """ + + def __init__( + self, + envDef: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "EnvVarRequirement" + self.envDef = envDef + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.envDef)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvVarRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "EnvVarRequirement": + raise ValidationException("Not a EnvVarRequirement") + + try: + envDef = load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvVarRequirement'", None, _errors__) + _constructed = cls( + envDef=envDef, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) 
+ return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "EnvVarRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.envDef is not None and "envDef" not in r: + r["envDef"] = save( + self.envDef, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envDef", + val=r.get("envDef"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "envDef"]) + + +class ShellCommandRequirement(ProcessRequirement): + """ + Modify the behavior of CommandLineTool to generate a single string + containing a shell command line. Each item in the `arguments` list must + be joined into a string separated by single spaces and quoted to prevent + interpretation by the shell, unless `CommandLineBinding` for that argument + contains `shellQuote: false`. If `shellQuote: false` is specified, the + argument is joined into the command string without quoting, which allows + the use of shell metacharacters such as `|` for pipes. + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ShellCommandRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ShellCommandRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ShellCommandRequirement": + raise ValidationException("Not a ShellCommandRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ShellCommandRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + 
min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ResourceRequirement(ProcessRequirement): + """ + Specify basic hardware resource requirements. + + "min" is the minimum amount of a resource that must be reserved to + schedule a job. If "min" cannot be satisfied, the job should not + be run. + + "max" is the maximum amount of a resource that the job shall be + allocated. If a node has sufficient resources, multiple jobs may + be scheduled on a single node provided each job's "max" resource + requirements are met. If a job attempts to exceed its resource + allocation, an implementation may deny additional resources, which + may result in job failure. + + If both "min" and "max" are specified, an implementation may + choose to allocate any amount between "min" and "max", with the + actual allocation provided in the `runtime` object. + + If "min" is specified but "max" is not, then "max" == "min" + If "max" is specified by "min" is not, then "min" == "max". + + It is an error if max < min. + + It is an error if the value of any of these fields is negative. + + If neither "min" nor "max" is specified for a resource, use the default values below. 
+ + """ + + def __init__( + self, + coresMin: Optional[Any] = None, + coresMax: Optional[Any] = None, + ramMin: Optional[Any] = None, + ramMax: Optional[Any] = None, + tmpdirMin: Optional[Any] = None, + tmpdirMax: Optional[Any] = None, + outdirMin: Optional[Any] = None, + outdirMax: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ResourceRequirement" + self.coresMin = coresMin + self.coresMax = coresMax + self.ramMin = ramMin + self.ramMax = ramMax + self.tmpdirMin = tmpdirMin + self.tmpdirMax = tmpdirMax + self.outdirMin = outdirMin + self.outdirMax = outdirMax + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ResourceRequirement): + return bool( + self.class_ == other.class_ + and self.coresMin == other.coresMin + and self.coresMax == other.coresMax + and self.ramMin == other.ramMin + and self.ramMax == other.ramMax + and self.tmpdirMin == other.tmpdirMin + and self.tmpdirMax == other.tmpdirMax + and self.outdirMin == other.outdirMin + and self.outdirMax == other.outdirMax + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.coresMin, + self.coresMax, + self.ramMin, + self.ramMax, + self.tmpdirMin, + self.tmpdirMax, + self.outdirMin, + self.outdirMax, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ResourceRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ResourceRequirement": + raise ValidationException("Not a ResourceRequirement") + + if "coresMin" in _doc: + 
try: + coresMin = load_field( + _doc.get("coresMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), + [e], + ) + ) + else: + coresMin = None + if "coresMax" in _doc: + try: + coresMax = load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [e], + ) + ) + else: + coresMax = None + if "ramMin" in _doc: + try: + ramMin = load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `ramMin` field is not valid because:", + SourceLine(_doc, "ramMin", str), + [e], + ) + ) + else: + ramMin = None + if "ramMax" in _doc: + try: + ramMax = load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), + [e], + ) + ) + else: + ramMax = None + if "tmpdirMin" in _doc: + try: + tmpdirMin = load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [e], + ) + ) + else: + tmpdirMin = None + if "tmpdirMax" in _doc: + try: + tmpdirMax = load_field( + _doc.get("tmpdirMax"), + 
union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [e], + ) + ) + else: + tmpdirMax = None + if "outdirMin" in _doc: + try: + outdirMin = load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + ) + ) + else: + outdirMin = None + if "outdirMax" in _doc: + try: + outdirMax = load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + ) + ) + else: + outdirMax = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ResourceRequirement'", None, _errors__) + _constructed = cls( + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str 
= "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ResourceRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.coresMin is not None and "coresMin" not in r: + r["coresMin"] = save( + self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="coresMin", + val=r.get("coresMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.coresMax is not None and "coresMax" not in r: + r["coresMax"] = save( + self.coresMax, top=False, 
base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="coresMax", + val=r.get("coresMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.ramMin is not None and "ramMin" not in r: + r["ramMin"] = save( + self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="ramMin", + val=r.get("ramMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.ramMax is not None and "ramMax" not in r: + r["ramMax"] = save( + self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="ramMax", + val=r.get("ramMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.tmpdirMin is not None and "tmpdirMin" not in r: + r["tmpdirMin"] = save( + self.tmpdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="tmpdirMin", + val=r.get("tmpdirMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.tmpdirMax is not None and "tmpdirMax" not in r: + r["tmpdirMax"] = save( + self.tmpdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="tmpdirMax", + val=r.get("tmpdirMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outdirMin is not None and "outdirMin" not in r: + r["outdirMin"] = save( + self.outdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outdirMin", + val=r.get("outdirMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outdirMax is not None and "outdirMax" not in r: + r["outdirMax"] = save( + 
self.outdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outdirMax", + val=r.get("outdirMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "coresMin", + "coresMax", + "ramMin", + "ramMax", + "tmpdirMin", + "tmpdirMax", + "outdirMin", + "outdirMax", + ] + ) + + +class WorkReuse(ProcessRequirement): + """ + For implementations that support reusing output from past work (on + the assumption that same code and same input produce same + results), control whether to enable or disable the reuse behavior + for a particular tool or step (to accommodate situations where that + assumption is incorrect). A reused step is not executed but + instead returns the same output as the original execution. + + If `WorkReuse` is not specified, correct tools should assume it + is enabled by default. 
+ + """ + + def __init__( + self, + enableReuse: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "WorkReuse" + self.enableReuse = enableReuse + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkReuse): + return bool( + self.class_ == other.class_ and self.enableReuse == other.enableReuse + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.enableReuse)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkReuse": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "WorkReuse": + raise ValidationException("Not a WorkReuse") + + try: + enableReuse = load_field( + _doc.get("enableReuse"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `enableReuse` field is not valid because:", + SourceLine(_doc, "enableReuse", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `enableReuse`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkReuse'", None, _errors__) + _constructed = cls( + enableReuse=enableReuse, + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "WorkReuse" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.enableReuse is not None and "enableReuse" not in r: + r["enableReuse"] = save( + self.enableReuse, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="enableReuse", + val=r.get("enableReuse"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "enableReuse"]) + + +class NetworkAccess(ProcessRequirement): + """ + Indicate whether a process requires outgoing IPv4/IPv6 network + access. Choice of IPv4 or IPv6 is implementation and site + specific, correct tools must support both. + + If `networkAccess` is false or not specified, tools must not + assume network access, except for localhost (the loopback device). + + If `networkAccess` is true, the tool must be able to make outgoing + connections to network resources. Resources may be on a private + subnet or the public Internet. However, implementations and sites + may apply their own security policies to restrict what is + accessible by the tool. + + Enabling network access does not imply a publicly routable IP + address or the ability to accept inbound connections. 
+ + """ + + def __init__( + self, + networkAccess: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "NetworkAccess" + self.networkAccess = networkAccess + + def __eq__(self, other: Any) -> bool: + if isinstance(other, NetworkAccess): + return bool( + self.class_ == other.class_ + and self.networkAccess == other.networkAccess + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.networkAccess)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "NetworkAccess": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "NetworkAccess": + raise ValidationException("Not a NetworkAccess") + + try: + networkAccess = load_field( + _doc.get("networkAccess"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `networkAccess` field is not valid because:", + SourceLine(_doc, "networkAccess", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `networkAccess`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'NetworkAccess'", None, _errors__) + _constructed = cls( + networkAccess=networkAccess, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "NetworkAccess" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.networkAccess is not None and "networkAccess" not in r: + r["networkAccess"] = save( + self.networkAccess, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="networkAccess", + 
It is an error if a file which + is writable by one workflow step is accessed (for reading or + writing) by any other workflow step running independently.
+ + """ + + def __init__( + self, + inplaceUpdate: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InplaceUpdateRequirement" + self.inplaceUpdate = inplaceUpdate + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InplaceUpdateRequirement): + return bool( + self.class_ == other.class_ + and self.inplaceUpdate == other.inplaceUpdate + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.inplaceUpdate)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InplaceUpdateRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InplaceUpdateRequirement": + raise ValidationException("Not a InplaceUpdateRequirement") + + try: + inplaceUpdate = load_field( + _doc.get("inplaceUpdate"), + booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inplaceUpdate` field is not valid because:", + SourceLine(_doc, "inplaceUpdate", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InplaceUpdateRequirement'", None, _errors__ + ) + 
_constructed = cls( + inplaceUpdate=inplaceUpdate, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InplaceUpdateRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.inplaceUpdate is not None and "inplaceUpdate" not in r: + r["inplaceUpdate"] = save( + self.inplaceUpdate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
staging of files, pulling a docker image, etc., and only counts + wall-time for the execution of the command line itself.
ToolTimeLimit") + + try: + timelimit = load_field( + _doc.get("timelimit"), + union_of_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `timelimit` field is not valid because:", + SourceLine(_doc, "timelimit", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `timelimit`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ToolTimeLimit'", None, _errors__) + _constructed = cls( + timelimit=timelimit, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ToolTimeLimit" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, 
key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.timelimit is not None and "timelimit" not in r: + r["timelimit"] = save( + self.timelimit, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="timelimit", + val=r.get("timelimit"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "timelimit"]) + + +class ExpressionToolOutputParameter(OutputParameter): + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + + def __eq__(self, 
other: Any) -> bool: + if isinstance(other, ExpressionToolOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionToolOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 
`secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + 
"invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ExpressionToolOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = 
self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, 
+ ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) + + +class WorkflowInputParameter(InputParameter): + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = 
loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = 
load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + 
loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_InputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = 
expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowInputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) 
+ cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and 
"streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class ExpressionTool(Process): + """ + An ExpressionTool is a type of Process object that can be run by itself + or as a Workflow step. It executes a pure Javascript expression that has + access to the same input parameters as a workflow. It is meant to be used + sparingly as a way to isolate complex Javascript expressions that need to + operate on input data and produce some result; perhaps just a + rearrangement of the inputs. No Docker software container is required + or allowed. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + expression: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "ExpressionTool" + self.expression = expression + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.expression == other.expression + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.expression, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ExpressionTool": + raise 
ValidationException("Not a ExpressionTool") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_ExpressionToolOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + 
"the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + try: + expression = load_field( + _doc.get("expression"), + ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `expression` field is not valid because:", + SourceLine(_doc, "expression", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `expression`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ExpressionTool'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + expression=expression, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key 
in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ExpressionTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + 
new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + 
new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.expression is not None and "expression" not in r: + r["expression"] = save( + self.expression, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expression", + val=r.get("expression"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "expression", + ] + ) + + +class WorkflowOutputParameter(OutputParameter): + """ + Describe an output parameter of a workflow. The parameter must be + connected to one or more parameters defined in the workflow that + will provide the value of the output parameter. It is legal to + connect a WorkflowInputParameter to a WorkflowOutputParameter. + + See [WorkflowStepInput](#WorkflowStepInput) for discussion of + `linkMerge` and `pickValue`. 
+ + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputSource: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.outputSource = outputSource + self.linkMerge = linkMerge + self.pickValue = pickValue + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.outputSource == other.outputSource + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.outputSource, + self.linkMerge, + self.pickValue, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = 
load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if 
"format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputSource" in _doc: + try: + outputSource = load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), + [e], + ) + ) + else: + outputSource = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "pickValue" in _doc: + try: + pickValue = load_field( + _doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `pickValue` field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + ) + ) + else: + pickValue = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + 
SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `outputSource`, `linkMerge`, `pickValue`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + outputSource=outputSource, + linkMerge=linkMerge, + pickValue=pickValue, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, 
doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", 
+ val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputSource is not None and "outputSource" not in r: + u = save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) + r["outputSource"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputSource", + val=r.get("outputSource"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.pickValue is not None and "pickValue" not in r: + r["pickValue"] = save( + self.pickValue, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + 
new_doc=r, + line_numbers=line_numbers, + key="pickValue", + val=r.get("pickValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "outputSource", + "linkMerge", + "pickValue", + "type", + ] + ) + + +class Sink(Saveable): + pass + + +class WorkflowStepInput(Identified, Sink, LoadContents, Labeled): + """ + The input of a workflow step connects an upstream parameter (from the + workflow inputs, or the outputs of other workflows steps) with the input + parameters of the process specified by the `run` field. Only input parameters + declared by the target process will be passed through at runtime to the process + though additional parameters may be specified (for use within `valueFrom` + expressions for instance) - unconnected or unused parameters do not represent an + error condition. + + # Input object + + A WorkflowStepInput object must contain an `id` field in the form + `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash + `/` the field name consists of the characters following the final slash + (the prefix portion may contain one or more slashes to indicate scope). + This defines a field of the workflow step input object with the value of + the `source` parameter(s). 
+ + # Merging multiple inbound data links + + To merge multiple inbound data links, + [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + If the sink parameter is an array, or named in a [workflow + scatter](#WorkflowStep) operation, there may be multiple inbound + data links listed in the `source` field. The values from the + input links are merged depending on the method specified in the + `linkMerge` field. If both `linkMerge` and `pickValue` are null + or not specified, and there is more than one element in the + `source` array, the default method is "merge_nested". + + If both `linkMerge` and `pickValue` are null or not specified, and + there is only a single element in the `source`, then the input + parameter takes the scalar value from the single input link (it is + *not* wrapped in a single-list). + + * **merge_nested** + + The input must be an array consisting of exactly one entry for each + input link. If "merge_nested" is specified with a single link, the value + from the link must be wrapped in a single-item list. + + * **merge_flattened** + + 1. The source and sink parameters must be compatible types, or the source + type must be compatible with single element from the "items" type of + the destination array parameter. + 2. Source parameters which are arrays are concatenated. + Source parameters which are single element types are appended as + single elements. + + # Picking non-null values among inbound data links + + If present, `pickValue` specifies how to pick non-null values among inbound data links. + + `pickValue` is evaluated + 1. Once all source values from upstream step or parameters are available. + 2. After `linkMerge`. + 3. Before `scatter` or `valueFrom`. 
+ + This is specifically intended to be useful in combination with + [conditional execution](#WorkflowStep), where several upstream + steps may be connected to a single input (`source` is a list), and + skipped steps produce null values. + + Static type checkers should check for type consistency after inferring what the type + will be after `pickValue` is applied, just as they do currently for `linkMerge`. + + * **first_non_null** + + For the first level of a list input, pick the first non-null element. The result is a scalar. + It is an error if there is no non-null element. Examples: + * `[null, x, null, y] -> x` + * `[null, [null], null, y] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: If-else pattern where the + value comes either from a conditional step or from a default or + fallback value. The conditional step(s) should be placed first in + the list. + + * **the_only_non_null** + + For the first level of a list input, pick the single non-null element. The result is a scalar. + It is an error if there is more than one non-null element. Examples: + + * `[null, x, null] -> x` + * `[null, x, null, y] -> Runtime Error` + * `[null, [null], null] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: Switch type patterns where developer considers + more than one active code path as a workflow error + (possibly indicating an error in writing `when` condition expressions). + + * **all_non_null** + + For the first level of a list input, pick all non-null values. + The result is a list, which may be empty. Examples: + + * `[null, x, null] -> [x]` + * `[x, null, y] -> [x, y]` + * `[null, [x], [null]] -> [[x], [null]]` + * `[null, null, null] -> []` + + *Intended use case*: It is valid to have more than one source, but + sources are conditional, so null sources (from skipped steps) + should be filtered out. 
+ + """ + + def __init__( + self, + id: Optional[Any] = None, + source: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + label: Optional[Any] = None, + default: Optional[Any] = None, + valueFrom: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.source = source + self.linkMerge = linkMerge + self.pickValue = pickValue + self.loadContents = loadContents + self.loadListing = loadListing + self.label = label + self.default = default + self.valueFrom = valueFrom + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepInput): + return bool( + self.id == other.id + and self.source == other.source + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.label == other.label + and self.default == other.default + and self.valueFrom == other.valueFrom + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.source, + self.linkMerge, + self.pickValue, + self.loadContents, + self.loadListing, + self.label, + self.default, + self.valueFrom, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepInput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + 
baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "source" in _doc: + try: + source = load_field( + _doc.get("source"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [e], + ) + ) + else: + source = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "pickValue" in _doc: + try: + pickValue = load_field( + _doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `pickValue` field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + ) + ) + else: + pickValue = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + 
union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `pickValue`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__) + _constructed = cls( + id=id, + source=source, + 
linkMerge=linkMerge, + pickValue=pickValue, + loadContents=loadContents, + loadListing=loadListing, + label=label, + default=default, + valueFrom=valueFrom, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + 
keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.source is not None and "source" not in r: + u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) + r["source"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="source", + val=r.get("source"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.pickValue is not None and "pickValue" not in r: + r["pickValue"] = save( + self.pickValue, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pickValue", + val=r.get("pickValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + 
val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "source", + "linkMerge", + "pickValue", + "loadContents", + "loadListing", + "label", + "default", + "valueFrom", + ] + ) + + +class WorkflowStepOutput(Identified): + """ + Associate an output parameter of the underlying process with a workflow + parameter. 
The workflow parameter (given in the `id` field) be may be used + as a `source` to connect with input parameters of other workflow steps, or + with an output parameter of the process. + + A unique identifier for this workflow output parameter. This is + the identifier to use in the `source` field of `WorkflowStepInput` + to connect the output value to downstream parameters. + + """ + + def __init__( + self, + id: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepOutput): + return bool(self.id == other.id) + return False + + def __hash__(self) -> int: + return hash((self.id)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepOutput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + 
extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__) + _constructed = cls( + id=id, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is 
not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["id"]) + + +class WorkflowStep(Identified, Labeled, Documented): + """ + A workflow step is an executable element of a workflow. It specifies the + underlying process implementation (such as `CommandLineTool` or another + `Workflow`) in the `run` field and connects the input and output parameters + of the underlying process to workflow parameters. + + # Scatter/gather + + To use scatter/gather, + [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + A "scatter" operation specifies that the associated workflow step or + subworkflow should execute separately over a list of input elements. Each + job making up a scatter operation is independent and may be executed + concurrently. + + The `scatter` field specifies one or more input parameters which will be + scattered. An input parameter may be listed more than once. 
The declared + type of each input parameter implicitly becomes an array of items of the + input parameter type. If a parameter is listed more than once, it becomes + a nested array. As a result, upstream parameters which are connected to + scattered parameters must be arrays. + + All output parameter types are also implicitly wrapped in arrays. Each job + in the scatter results in an entry in the output array. + + If any scattered parameter runtime value is an empty array, all outputs are + set to empty arrays and no work is done for the step, according to + applicable scattering rules. + + If `scatter` declares more than one input parameter, `scatterMethod` + describes how to decompose the input into a discrete set of jobs. + + * **dotproduct** specifies that each of the input arrays are aligned and one + element taken from each array to construct each job. It is an error + if all input arrays are not the same length. + + * **nested_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output must be nested arrays for each level of scattering, in the + order that the input arrays are listed in the `scatter` field. + + * **flat_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output arrays must be flattened to a single level, but otherwise listed in the + order that the input arrays are listed in the `scatter` field. + + # Conditional execution (Optional) + + Conditional execution makes execution of a step conditional on an + expression. A step that is not executed is "skipped". A skipped + step produces `null` for all output parameters. + + The condition is evaluated after `scatter`, using the input object + of each individual scatter job. This means over a set of scatter + jobs, some may be executed and some may be skipped. 
When the + results are gathered, skipped steps must be `null` in the output + arrays. + + The `when` field controls conditional execution. This is an + expression that must be evaluated with `inputs` bound to the step + input object (or individual scatter job), and returns a boolean + value. It is an error if this expression returns a value other + than `true` or `false`. + + Conditionals in CWL are an optional feature and are not required + to be implemented by all consumers of CWL documents. An + implementation that does not support conditionals must return a + fatal error when attempting to execute a workflow that uses + conditional constructs the implementation does not support. + + # Subworkflows + + To specify a nested workflow as part of a workflow step, + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be + specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as + a subworkflow (recursive workflows are not allowed). 
+ + """ + + def __init__( + self, + in_: Any, + out: Any, + run: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + when: Optional[Any] = None, + scatter: Optional[Any] = None, + scatterMethod: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.when = when + self.scatter = scatter + self.scatterMethod = scatterMethod + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStep): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + and self.when == other.when + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.when, + self.scatter, + self.scatterMethod, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStep": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + 
uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + in_ = load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + ) + ) + try: + out = load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + try: + run = load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None, + subscope_baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [e], + ) + ) + if "when" in _doc: + try: + when = load_field( + _doc.get("when"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), + [e], + ) + ) + else: + when = None + if "scatter" 
in _doc: + try: + scatter = load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [e], + ) + ) + else: + scatter = None + if "scatterMethod" in _doc: + try: + scatterMethod = load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [e], + ) + ) + else: + scatterMethod = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`, `scatter`, `scatterMethod`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStep'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + when=when, + scatter=scatter, + scatterMethod=scatterMethod, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, 
CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), 
+ cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.in_ is not None and "in" not in r: + r["in"] = save( + self.in_, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="in", + val=r.get("in"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.out is not None and "out" not in r: + u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) + r["out"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="out", + val=r.get("out"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if 
self.run is not None and "run" not in r: + u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) + r["run"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="run", + val=r.get("run"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.when is not None and "when" not in r: + r["when"] = save( + self.when, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="when", + val=r.get("when"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.scatter is not None and "scatter" not in r: + u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) + r["scatter"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatter", + val=r.get("scatter"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.scatterMethod is not None and "scatterMethod" not in r: + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) + r["scatterMethod"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatterMethod", + val=r.get("scatterMethod"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "in", + "out", + "requirements", + "hints", + "run", + "when", + "scatter", + "scatterMethod", + ] + ) + + +class Workflow(Process): + """ + A workflow describes a set of **steps** and the **dependencies** between + those steps. When a step produces output that will be consumed by a + second step, the first step is a dependency of the second step. 
+
+    When there is a dependency, the workflow engine must execute the preceding
+    step and wait for it to successfully produce output before executing the
+    dependent step.  If two steps are defined in the workflow graph that
+    are not directly or indirectly dependent, these steps are **independent**,
+    and may execute in any order or execute concurrently.  A workflow is
+    complete when all steps have been executed.
+
+    Dependencies between parameters are expressed using the `source`
+    field on [workflow step input parameters](#WorkflowStepInput) and
+    `outputSource` field on [workflow output
+    parameters](#WorkflowOutputParameter).
+
+    The `source` field on each workflow step input parameter expresses
+    the data links that contribute to the value of the step input
+    parameter (the "sink").  A workflow step can only begin execution
+    when every data link connected to a step has been fulfilled.
+
+    The `outputSource` field on each workflow output parameter
+    expresses the data links that contribute to the value of the
+    workflow output parameter (the "sink").  Workflow execution cannot
+    complete successfully until every data link connected to an output
+    parameter has been fulfilled.
+
+    ## Workflow success and failure
+
+    A completed step must result in one of `success`, `temporaryFailure` or
+    `permanentFailure` states.  An implementation may choose to retry a step
+    execution which resulted in `temporaryFailure`.  An implementation may
+    choose to either continue running other steps of a workflow, or terminate
+    immediately upon `permanentFailure`.
+
+    * If any step of a workflow execution results in `permanentFailure`, then
+    the workflow status is `permanentFailure`.
+
+    * If one or more steps result in `temporaryFailure` and all other steps
+    complete `success` or are not executed, then the workflow status is
+    `temporaryFailure`.
+
+    * If all workflow steps are executed and complete with `success`, then the
+    workflow status is `success`.
+ + # Extensions + + [ScatterFeatureRequirement](#ScatterFeatureRequirement) and + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are + available as standard [extensions](#Extensions_and_Metadata) to core + workflow semantics. + + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + steps: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "Workflow" + self.steps = steps + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Workflow): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.steps == other.steps + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.steps, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> 
"Workflow": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Workflow": + raise ValidationException("Not a Workflow") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_WorkflowOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid 
because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + 
uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + try: + steps = load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_WorkflowStepLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `steps`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Workflow'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + steps=steps, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: 
Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Workflow" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if 
self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and 
"hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.steps is not None and "steps" not in r: + r["steps"] = save( + self.steps, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="steps", + val=r.get("steps"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "steps", + ] + ) + + +class SubworkflowFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support nested workflows in + the `run` field of [WorkflowStep](#WorkflowStep). 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SubworkflowFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SubworkflowFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SubworkflowFeatureRequirement": + raise ValidationException("Not a SubworkflowFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'SubworkflowFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + 
doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SubworkflowFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ScatterFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support the `scatter` and + `scatterMethod` fields of [WorkflowStep](#WorkflowStep). 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ScatterFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ScatterFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ScatterFeatureRequirement": + raise ValidationException("Not a ScatterFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + 
elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ScatterFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links + listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "MultipleInputFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "MultipleInputFeatureRequirement": + raise ValidationException("Not a MultipleInputFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, 
CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "MultipleInputFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the `valueFrom` field + of [WorkflowStepInput](#WorkflowStepInput). 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "StepInputExpressionRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "StepInputExpressionRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "StepInputExpressionRequirement": + raise ValidationException("Not a StepInputExpressionRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'StepInputExpressionRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, 
CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "StepInputExpressionRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class OperationInputParameter(InputParameter): + """ + Describe an input parameter of an operation. 
+ + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OperationInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OperationInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = 
load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if 
"format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + 
SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'OperationInputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, 
doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", 
+ val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + 
old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + ] + ) + + +class OperationOutputParameter(OutputParameter): + """ + Describe an output parameter of an operation. + + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OperationOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == 
other.id + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OperationOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + 
baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'OperationOutputParameter'", 
None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + 
relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", 
+ val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) + + +class Operation(Process): + """ + This record describes an abstract operation. It is a potential + step of a workflow that has not yet been bound to a concrete + implementation. It specifies an input and output signature, but + does not provide enough information to be executed. An + implementation (or other tooling) may provide a means of binding + an Operation to a concrete process (such as Workflow, + CommandLineTool, or ExpressionTool) with a compatible signature. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "Operation" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Operation): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Operation": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Operation": + raise ValidationException("Not a Operation") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + 
uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_OperationInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_OperationOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + 
"the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Operation'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] 
+ if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Operation" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + ) + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len = add_kv( + 
old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + ] + ) + + +_vocab = { + "Any": "https://w3id.org/cwl/salad#Any", + "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", + "CWLType": "https://w3id.org/cwl/cwl#CWLType", + "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", + "CommandInputArraySchema": "https://w3id.org/cwl/cwl#CommandInputArraySchema", + "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", + "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter", + "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", + "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", + "CommandInputSchema": "https://w3id.org/cwl/cwl#CommandInputSchema", + "CommandLineBindable": "https://w3id.org/cwl/cwl#CommandLineBindable", + "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", + "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", + "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", + "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", + "CommandOutputEnumSchema": 
"https://w3id.org/cwl/cwl#CommandOutputEnumSchema", + "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", + "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", + "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", + "Directory": "https://w3id.org/cwl/cwl#Directory", + "Dirent": "https://w3id.org/cwl/cwl#Dirent", + "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", + "Documented": "https://w3id.org/cwl/salad#Documented", + "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", + "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", + "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", + "Expression": "https://w3id.org/cwl/cwl#Expression", + "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", + "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", + "ExpressionToolOutputParameter": "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", + "FieldBase": "https://w3id.org/cwl/cwl#FieldBase", + "File": "https://w3id.org/cwl/cwl#File", + "IOSchema": "https://w3id.org/cwl/cwl#IOSchema", + "Identified": "https://w3id.org/cwl/cwl#Identified", + "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", + "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", + "InplaceUpdateRequirement": "https://w3id.org/cwl/cwl#InplaceUpdateRequirement", + "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", + "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", + "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", + "InputFormat": "https://w3id.org/cwl/cwl#InputFormat", + "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", + "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField", + "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", + "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", + "Labeled": 
"https://w3id.org/cwl/cwl#Labeled", + "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", + "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", + "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", + "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", + "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", + "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", + "Operation": "https://w3id.org/cwl/cwl#Operation", + "OperationInputParameter": "https://w3id.org/cwl/cwl#OperationInputParameter", + "OperationOutputParameter": "https://w3id.org/cwl/cwl#OperationOutputParameter", + "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", + "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", + "OutputFormat": "https://w3id.org/cwl/cwl#OutputFormat", + "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", + "OutputRecordField": "https://w3id.org/cwl/cwl#OutputRecordField", + "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", + "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", + "Parameter": "https://w3id.org/cwl/cwl#Parameter", + "PickValueMethod": "https://w3id.org/cwl/cwl#PickValueMethod", + "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", + "Process": "https://w3id.org/cwl/cwl#Process", + "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", + "RecordField": "https://w3id.org/cwl/salad#RecordField", + "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", + "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", + "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", + "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", + "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", + "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", + "ShellCommandRequirement": 
"https://w3id.org/cwl/cwl#ShellCommandRequirement", + "Sink": "https://w3id.org/cwl/cwl#Sink", + "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", + "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", + "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", + "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", + "ToolTimeLimit": "https://w3id.org/cwl/cwl#ToolTimeLimit", + "WorkReuse": "https://w3id.org/cwl/cwl#WorkReuse", + "Workflow": "https://w3id.org/cwl/cwl#Workflow", + "WorkflowInputParameter": "https://w3id.org/cwl/cwl#WorkflowInputParameter", + "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", + "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", + "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", + "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", + "all_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null", + "array": "https://w3id.org/cwl/salad#array", + "boolean": "http://www.w3.org/2001/XMLSchema#boolean", + "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", + "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", + "double": "http://www.w3.org/2001/XMLSchema#double", + "draft-2": "https://w3id.org/cwl/cwl#draft-2", + "draft-3": "https://w3id.org/cwl/cwl#draft-3", + "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", + "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", + "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", + "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", + "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", + "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", + "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", + "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", + "enum": "https://w3id.org/cwl/salad#enum", + "first_non_null": 
"https://w3id.org/cwl/cwl#PickValueMethod/first_non_null", + "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", + "float": "http://www.w3.org/2001/XMLSchema#float", + "int": "http://www.w3.org/2001/XMLSchema#int", + "long": "http://www.w3.org/2001/XMLSchema#long", + "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", + "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", + "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", + "no_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing", + "null": "https://w3id.org/cwl/salad#null", + "record": "https://w3id.org/cwl/salad#record", + "shallow_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing", + "stderr": "https://w3id.org/cwl/cwl#stderr", + "stdin": "https://w3id.org/cwl/cwl#stdin", + "stdout": "https://w3id.org/cwl/cwl#stdout", + "string": "http://www.w3.org/2001/XMLSchema#string", + "the_only_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null", + "v1.0": "https://w3id.org/cwl/cwl#v1.0", + "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", + "v1.1": "https://w3id.org/cwl/cwl#v1.1", + "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", + "v1.2": "https://w3id.org/cwl/cwl#v1.2", + "v1.2.0-dev1": "https://w3id.org/cwl/cwl#v1.2.0-dev1", + "v1.2.0-dev2": "https://w3id.org/cwl/cwl#v1.2.0-dev2", + "v1.2.0-dev3": "https://w3id.org/cwl/cwl#v1.2.0-dev3", + "v1.2.0-dev4": "https://w3id.org/cwl/cwl#v1.2.0-dev4", + "v1.2.0-dev5": "https://w3id.org/cwl/cwl#v1.2.0-dev5", +} +_rvocab = { + "https://w3id.org/cwl/salad#Any": "Any", + "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", + "https://w3id.org/cwl/cwl#CWLType": "CWLType", + "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", + "https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", + "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", + 
"https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", + "https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", + "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", + "https://w3id.org/cwl/cwl#CommandInputSchema": "CommandInputSchema", + "https://w3id.org/cwl/cwl#CommandLineBindable": "CommandLineBindable", + "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", + "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", + "https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", + "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", + "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", + "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", + "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", + "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", + "https://w3id.org/cwl/cwl#Directory": "Directory", + "https://w3id.org/cwl/cwl#Dirent": "Dirent", + "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", + "https://w3id.org/cwl/salad#Documented": "Documented", + "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", + "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", + "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", + "https://w3id.org/cwl/cwl#Expression": "Expression", + "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", + "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", + "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", + "https://w3id.org/cwl/cwl#FieldBase": "FieldBase", + "https://w3id.org/cwl/cwl#File": "File", + "https://w3id.org/cwl/cwl#IOSchema": "IOSchema", + "https://w3id.org/cwl/cwl#Identified": "Identified", + "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", + 
"https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", + "https://w3id.org/cwl/cwl#InplaceUpdateRequirement": "InplaceUpdateRequirement", + "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", + "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", + "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", + "https://w3id.org/cwl/cwl#InputFormat": "InputFormat", + "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", + "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", + "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", + "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", + "https://w3id.org/cwl/cwl#Labeled": "Labeled", + "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", + "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", + "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", + "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", + "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", + "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", + "https://w3id.org/cwl/cwl#Operation": "Operation", + "https://w3id.org/cwl/cwl#OperationInputParameter": "OperationInputParameter", + "https://w3id.org/cwl/cwl#OperationOutputParameter": "OperationOutputParameter", + "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", + "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", + "https://w3id.org/cwl/cwl#OutputFormat": "OutputFormat", + "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", + "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", + "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", + "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", + "https://w3id.org/cwl/cwl#Parameter": "Parameter", + "https://w3id.org/cwl/cwl#PickValueMethod": "PickValueMethod", + "https://w3id.org/cwl/salad#PrimitiveType": 
"PrimitiveType", + "https://w3id.org/cwl/cwl#Process": "Process", + "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", + "https://w3id.org/cwl/salad#RecordField": "RecordField", + "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", + "https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", + "https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", + "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", + "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", + "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", + "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", + "https://w3id.org/cwl/cwl#Sink": "Sink", + "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", + "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", + "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", + "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", + "https://w3id.org/cwl/cwl#ToolTimeLimit": "ToolTimeLimit", + "https://w3id.org/cwl/cwl#WorkReuse": "WorkReuse", + "https://w3id.org/cwl/cwl#Workflow": "Workflow", + "https://w3id.org/cwl/cwl#WorkflowInputParameter": "WorkflowInputParameter", + "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", + "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", + "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", + "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", + "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null": "all_non_null", + "https://w3id.org/cwl/salad#array": "array", + "http://www.w3.org/2001/XMLSchema#boolean": "boolean", + "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", + "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", + "http://www.w3.org/2001/XMLSchema#double": "double", + 
"https://w3id.org/cwl/cwl#draft-2": "draft-2", + "https://w3id.org/cwl/cwl#draft-3": "draft-3", + "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", + "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", + "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", + "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", + "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", + "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", + "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", + "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", + "https://w3id.org/cwl/salad#enum": "enum", + "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null": "first_non_null", + "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", + "http://www.w3.org/2001/XMLSchema#float": "float", + "http://www.w3.org/2001/XMLSchema#int": "int", + "http://www.w3.org/2001/XMLSchema#long": "long", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", + "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", + "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing": "no_listing", + "https://w3id.org/cwl/salad#null": "null", + "https://w3id.org/cwl/salad#record": "record", + "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing": "shallow_listing", + "https://w3id.org/cwl/cwl#stderr": "stderr", + "https://w3id.org/cwl/cwl#stdin": "stdin", + "https://w3id.org/cwl/cwl#stdout": "stdout", + "http://www.w3.org/2001/XMLSchema#string": "string", + "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null": "the_only_non_null", + "https://w3id.org/cwl/cwl#v1.0": "v1.0", + "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", + "https://w3id.org/cwl/cwl#v1.1": "v1.1", + "https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", + "https://w3id.org/cwl/cwl#v1.2": "v1.2", + "https://w3id.org/cwl/cwl#v1.2.0-dev1": "v1.2.0-dev1", + 
"https://w3id.org/cwl/cwl#v1.2.0-dev2": "v1.2.0-dev2", + "https://w3id.org/cwl/cwl#v1.2.0-dev3": "v1.2.0-dev3", + "https://w3id.org/cwl/cwl#v1.2.0-dev4": "v1.2.0-dev4", + "https://w3id.org/cwl/cwl#v1.2.0-dev5": "v1.2.0-dev5", +} + +strtype = _PrimitiveLoader(str) +inttype = _PrimitiveLoader(int) +floattype = _PrimitiveLoader(float) +booltype = _PrimitiveLoader(bool) +None_type = _PrimitiveLoader(type(None)) +Any_type = _AnyLoader() +PrimitiveTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + ), + "PrimitiveType", +) +AnyLoader = _EnumLoader(("Any",), "Any") +RecordFieldLoader = _RecordLoader(RecordField) +RecordSchemaLoader = _RecordLoader(RecordSchema) +EnumSchemaLoader = _RecordLoader(EnumSchema) +ArraySchemaLoader = _RecordLoader(ArraySchema) +CWLVersionLoader = _EnumLoader( + ( + "draft-2", + "draft-3.dev1", + "draft-3.dev2", + "draft-3.dev3", + "draft-3.dev4", + "draft-3.dev5", + "draft-3", + "draft-4.dev1", + "draft-4.dev2", + "draft-4.dev3", + "v1.0.dev4", + "v1.0", + "v1.1.0-dev1", + "v1.1", + "v1.2.0-dev1", + "v1.2.0-dev2", + "v1.2.0-dev3", + "v1.2.0-dev4", + "v1.2.0-dev5", + "v1.2", + ), + "CWLVersion", +) +CWLTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + "File", + "Directory", + ), + "CWLType", +) +FileLoader = _RecordLoader(File) +DirectoryLoader = _RecordLoader(Directory) +LoadListingEnumLoader = _EnumLoader( + ( + "no_listing", + "shallow_listing", + "deep_listing", + ), + "LoadListingEnum", +) +ExpressionLoader = _ExpressionLoader(str) +InputBindingLoader = _RecordLoader(InputBinding) +InputRecordFieldLoader = _RecordLoader(InputRecordField) +InputRecordSchemaLoader = _RecordLoader(InputRecordSchema) +InputEnumSchemaLoader = _RecordLoader(InputEnumSchema) +InputArraySchemaLoader = _RecordLoader(InputArraySchema) +OutputRecordFieldLoader = _RecordLoader(OutputRecordField) +OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema) 
+OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema) +OutputArraySchemaLoader = _RecordLoader(OutputArraySchema) +InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement) +SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement) +SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema) +LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement) +EnvironmentDefLoader = _RecordLoader(EnvironmentDef) +CommandLineBindingLoader = _RecordLoader(CommandLineBinding) +CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding) +CommandLineBindableLoader = _RecordLoader(CommandLineBindable) +CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField) +CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema) +CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema) +CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema) +CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField) +CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema) +CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema) +CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema) +CommandInputParameterLoader = _RecordLoader(CommandInputParameter) +CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) +stdinLoader = _EnumLoader(("stdin",), "stdin") +stdoutLoader = _EnumLoader(("stdout",), "stdout") +stderrLoader = _EnumLoader(("stderr",), "stderr") +CommandLineToolLoader = _RecordLoader(CommandLineTool) +DockerRequirementLoader = _RecordLoader(DockerRequirement) +SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) +SoftwarePackageLoader = _RecordLoader(SoftwarePackage) +DirentLoader = _RecordLoader(Dirent) +InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement) +EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement) +ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement) 
+ResourceRequirementLoader = _RecordLoader(ResourceRequirement) +WorkReuseLoader = _RecordLoader(WorkReuse) +NetworkAccessLoader = _RecordLoader(NetworkAccess) +InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement) +ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit) +ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter) +WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter) +ExpressionToolLoader = _RecordLoader(ExpressionTool) +LinkMergeMethodLoader = _EnumLoader( + ( + "merge_nested", + "merge_flattened", + ), + "LinkMergeMethod", +) +PickValueMethodLoader = _EnumLoader( + ( + "first_non_null", + "the_only_non_null", + "all_non_null", + ), + "PickValueMethod", +) +WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) +WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) +WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) +ScatterMethodLoader = _EnumLoader( + ( + "dotproduct", + "nested_crossproduct", + "flat_crossproduct", + ), + "ScatterMethod", +) +WorkflowStepLoader = _RecordLoader(WorkflowStep) +WorkflowLoader = _RecordLoader(Workflow) +SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) +ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement) +MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement) +StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement) +OperationInputParameterLoader = _RecordLoader(OperationInputParameter) +OperationOutputParameterLoader = _RecordLoader(OperationOutputParameter) +OperationLoader = _RecordLoader(Operation) +array_of_strtype = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ) +) +uri_strtype_True_False_None = _URILoader(strtype, True, False, None) 
+union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + ) +) +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype +) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + 2, +) +array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_RecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" +) +Record_nameLoader = _EnumLoader(("record",), "Record_name") +typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2) +union_of_None_type_or_strtype = _UnionLoader( + ( + None_type, + strtype, + ) +) +uri_union_of_None_type_or_strtype_True_False_None = 
_URILoader( + union_of_None_type_or_strtype, True, False, None +) +uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) +Enum_nameLoader = _EnumLoader(("enum",), "Enum_name") +typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2) +uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + False, + True, + 2, +) +Array_nameLoader = _EnumLoader(("array",), "Array_name") +typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2) +File_classLoader = _EnumLoader(("File",), "File_class") +uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None) +uri_union_of_None_type_or_strtype_False_False_None = _URILoader( + union_of_None_type_or_strtype, False, False, None +) +union_of_None_type_or_inttype = _UnionLoader( + ( + None_type, + inttype, + ) +) +union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + FileLoader, + DirectoryLoader, + ) +) +array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader +) +union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader +) +Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) 
+union_of_None_type_or_booltype = _UnionLoader( + ( + None_type, + booltype, + ) +) +union_of_None_type_or_LoadListingEnumLoader = _UnionLoader( + ( + None_type, + LoadListingEnumLoader, + ) +) +array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( + ( + None_type, + SecondaryFileSchemaLoader, + array_of_SecondaryFileSchemaLoader, + ) +) +secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( + union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader +) +union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ExpressionLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, +) +union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + ) +) +uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None +) +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype +) 
+union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + 2, +) +array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) +union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_InputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" +) +uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + False, + True, + 2, +) 
+union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + 2, +) +array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) +union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_OutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" +) 
+uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + False, + True, + 2, +) +union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( + ( + None_type, + FileLoader, + DirectoryLoader, + Any_type, + ) +) +union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _UnionLoader( + ( + CommandInputParameterLoader, + WorkflowInputParameterLoader, + OperationInputParameterLoader, + ) +) +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _ArrayLoader( + union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader +) +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _IdMapLoader( + array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader, + "id", + "type", +) +union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _UnionLoader( + ( + CommandOutputParameterLoader, + ExpressionToolOutputParameterLoader, + WorkflowOutputParameterLoader, + OperationOutputParameterLoader, + ) +) +array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _ArrayLoader( + 
union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader +) +idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _IdMapLoader( + array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader, + "id", + "type", +) +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + LoadListingRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + ShellCommandRequirementLoader, + ResourceRequirementLoader, + WorkReuseLoader, + NetworkAccessLoader, + InplaceUpdateRequirementLoader, + ToolTimeLimitLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + ) +) 
+array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader +) +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + None_type, + 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + ) +) +idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + "class", + "None", +) 
+union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + LoadListingRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + ShellCommandRequirementLoader, + ResourceRequirementLoader, + WorkReuseLoader, + NetworkAccessLoader, + InplaceUpdateRequirementLoader, + ToolTimeLimitLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + Any_type, + ) +) +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader( + 
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type +) +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + None_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + ) +) 
+idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + "class", + "None", +) +union_of_None_type_or_CWLVersionLoader = _UnionLoader( + ( + None_type, + CWLVersionLoader, + ) +) +uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader( + union_of_None_type_or_CWLVersionLoader, False, True, None +) +union_of_None_type_or_array_of_strtype = _UnionLoader( + ( + None_type, + array_of_strtype, + ) +) +uri_union_of_None_type_or_array_of_strtype_True_False_None = _URILoader( + union_of_None_type_or_array_of_strtype, True, False, None +) +InlineJavascriptRequirement_classLoader = _EnumLoader( + ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" +) +uri_InlineJavascriptRequirement_classLoader_False_True_None = _URILoader( + InlineJavascriptRequirement_classLoader, False, True, None +) +SchemaDefRequirement_classLoader = 
_EnumLoader( + ("SchemaDefRequirement",), "SchemaDefRequirement_class" +) +uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( + SchemaDefRequirement_classLoader, False, True, None +) +union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( + ( + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + ) +) +array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( + union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader +) +union_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + ) +) +union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + booltype, + ExpressionLoader, + ) +) +LoadListingRequirement_classLoader = _EnumLoader( + ("LoadListingRequirement",), "LoadListingRequirement_class" +) +uri_LoadListingRequirement_classLoader_False_True_None = _URILoader( + LoadListingRequirement_classLoader, False, True, None +) +union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_strtype, + ) +) +union_of_None_type_or_ExpressionLoader = _UnionLoader( + ( + None_type, + ExpressionLoader, + ) +) +union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + CommandLineBindingLoader, + ) +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + ) +) 
+array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) +union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandInputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) +) 
+uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + False, + True, + 2, +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) 
+typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( + ( + None_type, + CommandOutputBindingLoader, + ) +) +array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandOutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) +) +uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + False, + True, + 2, +) 
+union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdinLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdoutLoader, + stderrLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) 
+typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") +uri_CommandLineTool_classLoader_False_True_None = _URILoader( + CommandLineTool_classLoader, False, True, None +) +array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) +idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( + array_of_CommandInputParameterLoader, "id", "type" +) +array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) +idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( + array_of_CommandOutputParameterLoader, "id", "type" +) +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + CommandLineBindingLoader, + ) +) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) +) +array_of_inttype = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype = _UnionLoader( + ( + None_type, + array_of_inttype, + ) +) +DockerRequirement_classLoader = 
_EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) +uri_DockerRequirement_classLoader_False_True_None = _URILoader( + DockerRequirement_classLoader, False, True, None +) +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) +uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( + SoftwareRequirement_classLoader, False, True, None +) +array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( + array_of_SoftwarePackageLoader, "package", "specs" +) +uri_union_of_None_type_or_array_of_strtype_False_False_None = _URILoader( + union_of_None_type_or_array_of_strtype, False, False, None +) +InitialWorkDirRequirement_classLoader = _EnumLoader( + ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" +) +uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( + InitialWorkDirRequirement_classLoader, False, True, None +) +union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + DirentLoader, + ExpressionLoader, + FileLoader, + DirectoryLoader, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader +) +union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + ExpressionLoader, + array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) 
+EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) +uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( + EnvVarRequirement_classLoader, False, True, None +) +array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( + array_of_EnvironmentDefLoader, "envName", "envValue" +) +ShellCommandRequirement_classLoader = _EnumLoader( + ("ShellCommandRequirement",), "ShellCommandRequirement_class" +) +uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( + ShellCommandRequirement_classLoader, False, True, None +) +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) +uri_ResourceRequirement_classLoader_False_True_None = _URILoader( + ResourceRequirement_classLoader, False, True, None +) +union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + floattype, + ExpressionLoader, + ) +) +WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") +uri_WorkReuse_classLoader_False_True_None = _URILoader( + WorkReuse_classLoader, False, True, None +) +union_of_booltype_or_ExpressionLoader = _UnionLoader( + ( + booltype, + ExpressionLoader, + ) +) +NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") +uri_NetworkAccess_classLoader_False_True_None = _URILoader( + NetworkAccess_classLoader, False, True, None +) +InplaceUpdateRequirement_classLoader = _EnumLoader( + ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" +) +uri_InplaceUpdateRequirement_classLoader_False_True_None = _URILoader( + InplaceUpdateRequirement_classLoader, False, True, None +) +ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") +uri_ToolTimeLimit_classLoader_False_True_None = _URILoader( + ToolTimeLimit_classLoader, False, True, None +) +union_of_inttype_or_ExpressionLoader = 
_UnionLoader( + ( + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_InputBindingLoader = _UnionLoader( + ( + None_type, + InputBindingLoader, + ) +) +ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") +uri_ExpressionTool_classLoader_False_True_None = _URILoader( + ExpressionTool_classLoader, False, True, None +) +array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) +idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( + array_of_WorkflowInputParameterLoader, "id", "type" +) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) +idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( + array_of_ExpressionToolOutputParameterLoader, "id", "type" +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1 +) +union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( + ( + None_type, + LinkMergeMethodLoader, + ) +) +union_of_None_type_or_PickValueMethodLoader = _UnionLoader( + ( + None_type, + PickValueMethodLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2 +) +array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( + array_of_WorkflowStepInputLoader, "id", "source" +) +union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + ( + strtype, + WorkflowStepOutputLoader, + ) +) +array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( + union_of_strtype_or_WorkflowStepOutputLoader +) +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) +) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + 
_URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) +) +array_of_Any_type = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type = _UnionLoader( + ( + None_type, + array_of_Any_type, + ) +) +idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_Any_type, "class", "None" +) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + ) +) +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + False, + False, + None, +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0 +) +union_of_None_type_or_ScatterMethodLoader = _UnionLoader( + ( + None_type, + ScatterMethodLoader, + ) +) +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader( + union_of_None_type_or_ScatterMethodLoader, False, True, None +) +Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, True, None +) +array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) +idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( + array_of_WorkflowOutputParameterLoader, "id", "type" +) +array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) +idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( + union_of_array_of_WorkflowStepLoader, "id", "None" +) +SubworkflowFeatureRequirement_classLoader = 
_EnumLoader( + ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" +) +uri_SubworkflowFeatureRequirement_classLoader_False_True_None = _URILoader( + SubworkflowFeatureRequirement_classLoader, False, True, None +) +ScatterFeatureRequirement_classLoader = _EnumLoader( + ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" +) +uri_ScatterFeatureRequirement_classLoader_False_True_None = _URILoader( + ScatterFeatureRequirement_classLoader, False, True, None +) +MultipleInputFeatureRequirement_classLoader = _EnumLoader( + ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" +) +uri_MultipleInputFeatureRequirement_classLoader_False_True_None = _URILoader( + MultipleInputFeatureRequirement_classLoader, False, True, None +) +StepInputExpressionRequirement_classLoader = _EnumLoader( + ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" +) +uri_StepInputExpressionRequirement_classLoader_False_True_None = _URILoader( + StepInputExpressionRequirement_classLoader, False, True, None +) +Operation_classLoader = _EnumLoader(("Operation",), "Operation_class") +uri_Operation_classLoader_False_True_None = _URILoader( + Operation_classLoader, False, True, None +) +array_of_OperationInputParameterLoader = _ArrayLoader(OperationInputParameterLoader) +idmap_inputs_array_of_OperationInputParameterLoader = _IdMapLoader( + array_of_OperationInputParameterLoader, "id", "type" +) +array_of_OperationOutputParameterLoader = _ArrayLoader(OperationOutputParameterLoader) +idmap_outputs_array_of_OperationOutputParameterLoader = _IdMapLoader( + array_of_OperationOutputParameterLoader, "id", "type" +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + ) +) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _ArrayLoader( 
+ union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + ) +) + + +def load_document( + doc: Any, + baseuri: Optional[str] = None, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + if baseuri is None: + baseuri = file_uri(os.getcwd()) + "/" + if loadingOptions is None: + loadingOptions = LoadingOptions() + result, metadata = _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + doc, + baseuri, + loadingOptions, + ) + return result + + +def load_document_with_metadata( + doc: Any, + baseuri: Optional[str] = None, + loadingOptions: Optional[LoadingOptions] = None, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Any: + if baseuri is None: + baseuri = file_uri(os.getcwd()) + "/" + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=baseuri) + return _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + doc, + baseuri, + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + +def load_document_by_string( + string: Any, + uri: str, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + yaml = yaml_no_ts() + result = yaml.load(string) + add_lc_filename(result, uri) + + if loadingOptions is None: + loadingOptions = 
LoadingOptions(fileuri=uri) + + result, metadata = _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + result, + uri, + loadingOptions, + ) + return result + + +def load_document_by_yaml( + yaml: Any, + uri: str, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + """ + Shortcut to load via a YAML object. + yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True + """ + add_lc_filename(yaml, uri) + + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=uri) + + result, metadata = _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + yaml, + uri, + loadingOptions, + ) + return result From 6f544e9749f837af42eb5cd0fc3c247d9a8c8f50 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Thu, 11 May 2023 18:58:59 -0400 Subject: [PATCH 31/44] updating python codegen/codegen_support, metaschema, and tests. --- schema_salad/metaschema.py | 1414 +++++++++++++---------- schema_salad/python_codegen.py | 181 ++- schema_salad/python_codegen_support.py | 15 +- schema_salad/tests/test_line_numbers.py | 214 ++-- 4 files changed, 1107 insertions(+), 717 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index dc30039e5..4a0707dde 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -47,6 +47,7 @@ doc_line_info = CommentedMap() +inserted_line_info: Dict[int, int] = {} class LoadingOptions: @@ -256,30 +257,94 @@ def add_kv( val: Any, max_len: int, cols: Dict[int, int], + min_col: int = 0, ) -> int: """Add key value pair into Commented Map. 
- Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap, - key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. + Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers + for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, + and max col value taken for each line. """ - if key in line_numbers: # If the key to insert is in the original CommentedMap - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) - elif isinstance(val, (int, float, bool, str)): # If the value is hashable + if len(inserted_line_info.keys()) >= 1: + max_line = max(inserted_line_info.keys()) + 1 + else: + max_line = 0 + if ( + key in line_numbers + ): # If the key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] + if line_info[0] not in inserted_line_info: + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) + inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] + else: + line = line_info[0] + while line in inserted_line_info.keys(): + line += 1 + new_doc.lc.add_kv_line_col( + key, + [ + line, + old_doc.lc.data[key][1], + line + (line - old_doc.lc.data[key][2]), + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[line] = old_doc.lc.data[key][1] + return max_len + elif isinstance(val, (int, float, str)) and not isinstance( + val, bool + ): # If the value is hashable if val in line_numbers: # If the value is in the original CommentedMap line = line_numbers[val]["line"] + if line in inserted_line_info: + line = max_line if line in cols: col = max(line_numbers[val]["col"], cols[line]) else: col = line_numbers[val]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 cols[line] = col + len("id") + 2 - else: # If neither the key or value is in the original 
CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) - max_len += 1 - else: # If neither the key or value is in the original CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col(key, [max_len, 0, max_len, len(key) + 2]) - max_len += 1 - return max_len + return max_len + elif isinstance(val, str): + if val + "?" in line_numbers: + line = line_numbers[val + "?"]["line"] + if line in inserted_line_info: + line = max_line + if line in cols: + col = max(line_numbers[val + "?"]["col"], cols[line]) + else: + col = line_numbers[val + "?"]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + cols[line] = col + len("id") + 2 + return max_len + elif old_doc: + if val in old_doc: + index = old_doc.lc.data.index(val) + line_info = old_doc.lc.data[index] + if line_info[0] not in inserted_line_info: + new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) + inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ + index + ][1] + else: + new_doc.lc.add_kv_line_col( + key, + [ + max_line, + old_doc.lc.data[index][1], + max_line + (max_line - old_doc.lc.data[index][2]), + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[max_line] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap (or value is not hashable) + new_doc.lc.add_kv_line_col( + key, [max_line, min_col, max_line, min_col + len(key) + 2] + ) + inserted_line_info[max_line] = min_col + len(key) + 2 + return max_len + 1 def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: @@ -289,7 +354,9 @@ def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: only save value info if value is hashable. 
""" line_numbers: Dict[Any, Dict[str, int]] = {} - if isinstance(doc, dict) or doc is None: + if doc is None: + return {} + if doc.lc.data is None: return {} for key, value in doc.lc.data.items(): line_numbers[key] = {} @@ -303,10 +370,19 @@ def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: return line_numbers +def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + min_col = 0 + for line in line_numbers: + if line_numbers[line]["col"] > min_col: + min_col = line_numbers[line]["col"] + return min_col + + def get_max_line_num(doc: CommentedMap) -> int: """Get the max line number for a CommentedMap. - Iterate through the the key with the highest line number until you reach a non-CommentedMap value or empty CommentedMap. + Iterate through the the key with the highest line number until you reach a non-CommentedMap value + or empty CommentedMap. """ max_line = 0 max_key = "" @@ -329,7 +405,8 @@ def save( ) -> save_type: """Save a val of any type. - Recursively calls save method from class if val is of type Saveable. Otherwise, saves val to CommentedMap or CommentedSeq + Recursively calls save method from class if val is of type Saveable. + Otherwise, saves val to CommentedMap or CommentedSeq. 
""" if keys is None: keys = [] @@ -356,7 +433,7 @@ def save( for i in range(0, len(val)): new_keys = keys if doc: - if i in doc: + if str(i) in doc: r.lc.data[i] = doc.lc.data[i] new_keys.append(i) r.append( @@ -1175,14 +1252,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -1193,18 +1275,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: u = 
save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( @@ -1213,58 +1332,43 @@ def save( line_numbers=line_numbers, key="name", val=r.get("name"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.doc is not None: - saved_val = save( + if self.doc is not None and "doc" not in r: + r["doc"] = save( self.doc, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["doc"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["doc"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", val=r.get("doc"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.type is not None: - saved_val = save( + if self.type is not None and "type" not in r: + r["type"] = save( self.type, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["type"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", val=r.get("type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -1382,14 +1486,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, 
CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -1400,66 +1509,81 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.fields is not None: - saved_val = save( - self.fields, - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + ["fields"], - ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["fields"] = saved_val + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, top=False, base_url=base_url, relative_uris=relative_uris + ) max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="fields", val=r.get("fields"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.type is not None: - saved_val = save( - self.type, - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - keys=keys + ["type"], + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", val=r.get("type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -1614,14 +1738,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -1632,18 +1761,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a 
list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( @@ -1652,11 +1818,14 @@ def save( line_numbers=line_numbers, key="name", val=r.get("name"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris ) - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) r["symbols"] = u max_len = add_kv( old_doc=doc, @@ -1664,33 +1833,26 @@ def save( line_numbers=line_numbers, key="symbols", val=r.get("symbols"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.type is not None: - saved_val = save( + if self.type is not None and "type" not in r: + r["type"] = save( self.type, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["type"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", val=r.get("type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -1805,14 +1967,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, 
+ keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -1823,18 +1990,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.items is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) r["items"] = u max_len = add_kv( @@ -1843,33 +2047,23 @@ def save( line_numbers=line_numbers, key="items", val=r.get("items"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.type is not None: - saved_val = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + ["type"], + if self.type is not None and "type" not in r: + r["type"] = 
save( + self.type, top=False, base_url=base_url, relative_uris=relative_uris ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", val=r.get("type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -2211,14 +2405,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -2229,18 +2428,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self._id is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + 
saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) r["_id"] = u max_len = add_kv( @@ -2249,258 +2485,164 @@ def save( line_numbers=line_numbers, key="_id", val=r.get("_id"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self._type is not None: - saved_val = save( - self._type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + ["_type"], + if self._type is not None and "_type" not in r: + r["_type"] = save( + self._type, top=False, base_url=base_url, relative_uris=relative_uris ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["_type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="_type", val=r.get("_type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self._container is not None: - saved_val = save( + if self._container is not None and "_container" not in r: + r["_container"] = save( self._container, top=False, base_url=base_url, relative_uris=relative_uris, - keys=keys + ["_container"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["_container"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="_container", val=r.get("_container"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.identity is not None: - saved_val = save( - self.identity, - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + ["identity"], + if self.identity 
is not None and "identity" not in r: + r["identity"] = save( + self.identity, top=False, base_url=base_url, relative_uris=relative_uris ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["identity"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="identity", val=r.get("identity"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.noLinkCheck is not None: - saved_val = save( + if self.noLinkCheck is not None and "noLinkCheck" not in r: + r["noLinkCheck"] = save( self.noLinkCheck, top=False, base_url=base_url, relative_uris=relative_uris, - keys=keys + ["noLinkCheck"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["noLinkCheck"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="noLinkCheck", val=r.get("noLinkCheck"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.mapSubject is not None: - saved_val = save( + if self.mapSubject is not None and "mapSubject" not in r: + r["mapSubject"] = save( self.mapSubject, top=False, base_url=base_url, relative_uris=relative_uris, - keys=keys + ["mapSubject"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["mapSubject"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="mapSubject", val=r.get("mapSubject"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.mapPredicate is not None: - saved_val = save( + if self.mapPredicate is not None and "mapPredicate" not in r: + r["mapPredicate"] = save( self.mapPredicate, top=False, base_url=base_url, 
relative_uris=relative_uris, - keys=keys + ["mapPredicate"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["mapPredicate"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="mapPredicate", val=r.get("mapPredicate"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.refScope is not None: - saved_val = save( - self.refScope, - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + ["refScope"], + if self.refScope is not None and "refScope" not in r: + r["refScope"] = save( + self.refScope, top=False, base_url=base_url, relative_uris=relative_uris ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["refScope"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="refScope", val=r.get("refScope"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.typeDSL is not None: - saved_val = save( - self.typeDSL, - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + ["typeDSL"], + if self.typeDSL is not None and "typeDSL" not in r: + r["typeDSL"] = save( + self.typeDSL, top=False, base_url=base_url, relative_uris=relative_uris ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["typeDSL"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="typeDSL", val=r.get("typeDSL"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.secondaryFilesDSL is not None: - saved_val = save( + if self.secondaryFilesDSL is not None and "secondaryFilesDSL" not in r: + r["secondaryFilesDSL"] = 
save( self.secondaryFilesDSL, top=False, base_url=base_url, relative_uris=relative_uris, - keys=keys + ["secondaryFilesDSL"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["secondaryFilesDSL"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="secondaryFilesDSL", val=r.get("secondaryFilesDSL"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.subscope is not None: - saved_val = save( - self.subscope, - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + ["subscope"], + if self.subscope is not None and "subscope" not in r: + r["subscope"] = save( + self.subscope, top=False, base_url=base_url, relative_uris=relative_uris ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["subscope"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="subscope", val=r.get("subscope"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -2632,14 +2774,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -2650,18 +2797,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers 
= get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.specializeFrom is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.specializeFrom is not None and "specializeFrom" not in r: u = save_relative_uri( self.specializeFrom, base_url, False, 1, relative_uris ) @@ -2672,10 +2856,11 @@ def save( line_numbers=line_numbers, key="specializeFrom", val=r.get("specializeFrom"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.specializeTo is not None: + if self.specializeTo is not None and "specializeTo" not in r: u = save_relative_uri(self.specializeTo, base_url, False, 1, relative_uris) r["specializeTo"] = u max_len = add_kv( @@ -2684,8 +2869,9 @@ def save( line_numbers=line_numbers, key="specializeTo", val=r.get("specializeTo"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -2905,14 +3091,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + 
relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -2923,18 +3114,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( @@ -2943,108 +3171,77 @@ def save( line_numbers=line_numbers, key="name", val=r.get("name"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.doc is not None: - saved_val = save( + if self.doc is not None and "doc" not in r: + r["doc"] = save( self.doc, top=False, - base_url=self.name, + base_url=str(self.name), 
relative_uris=relative_uris, - keys=keys + ["doc"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["doc"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", val=r.get("doc"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.type is not None: - saved_val = save( + if self.type is not None and "type" not in r: + r["type"] = save( self.type, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["type"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", val=r.get("type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.jsonldPredicate is not None: - saved_val = save( + if self.jsonldPredicate is not None and "jsonldPredicate" not in r: + r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["jsonldPredicate"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["jsonldPredicate"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="jsonldPredicate", val=r.get("jsonldPredicate"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.default is not None: - saved_val = save( + if self.default is not None and "default" not in r: + r["default"] = save( self.default, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["default"], ) - - if 
type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["default"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="default", val=r.get("default"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -3433,14 +3630,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -3451,18 +3653,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + 
old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( @@ -3471,111 +3710,82 @@ def save( line_numbers=line_numbers, key="name", val=r.get("name"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.inVocab is not None: - saved_val = save( + if self.inVocab is not None and "inVocab" not in r: + r["inVocab"] = save( self.inVocab, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["inVocab"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["inVocab"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="inVocab", val=r.get("inVocab"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.fields is not None: - saved_val = save( + if self.fields is not None and "fields" not in r: + r["fields"] = save( self.fields, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["fields"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["fields"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="fields", val=r.get("fields"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.type is not None: - saved_val = save( + if self.type is not None and "type" not in r: + r["type"] = save( self.type, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["type"], ) - - if type(saved_val) == list: - if ( - 
len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", val=r.get("type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.doc is not None: - saved_val = save( + if self.doc is not None and "doc" not in r: + r["doc"] = save( self.doc, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["doc"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["doc"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", val=r.get("doc"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docParent is not None and "docParent" not in r: + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris ) - if self.docParent is not None: - u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u max_len = add_kv( old_doc=doc, @@ -3583,11 +3793,14 @@ def save( line_numbers=line_numbers, key="docParent", val=r.get("docParent"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docChild is not None and "docChild" not in r: + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris ) - if self.docChild is not None: - u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u max_len = add_kv( old_doc=doc, @@ -3595,11 +3808,14 @@ def save( line_numbers=line_numbers, key="docChild", val=r.get("docChild"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docAfter is not None and "docAfter" not in r: + u = save_relative_uri( + self.docAfter, str(self.name), 
False, None, relative_uris ) - if self.docAfter is not None: - u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u max_len = add_kv( old_doc=doc, @@ -3607,86 +3823,63 @@ def save( line_numbers=line_numbers, key="docAfter", val=r.get("docAfter"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.jsonldPredicate is not None: - saved_val = save( + if self.jsonldPredicate is not None and "jsonldPredicate" not in r: + r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["jsonldPredicate"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["jsonldPredicate"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="jsonldPredicate", val=r.get("jsonldPredicate"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.documentRoot is not None: - saved_val = save( + if self.documentRoot is not None and "documentRoot" not in r: + r["documentRoot"] = save( self.documentRoot, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["documentRoot"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["documentRoot"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="documentRoot", val=r.get("documentRoot"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.abstract is not None: - saved_val = save( + if self.abstract is not None and "abstract" not in r: + r["abstract"] = save( self.abstract, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + 
["abstract"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["abstract"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="abstract", val=r.get("abstract"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.extends is not None: - u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) + if self.extends is not None and "extends" not in r: + u = save_relative_uri(self.extends, str(self.name), False, 1, relative_uris) r["extends"] = u max_len = add_kv( old_doc=doc, @@ -3694,33 +3887,26 @@ def save( line_numbers=line_numbers, key="extends", val=r.get("extends"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.specialize is not None: - saved_val = save( + if self.specialize is not None and "specialize" not in r: + r["specialize"] = save( self.specialize, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["specialize"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["specialize"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="specialize", val=r.get("specialize"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -4081,14 +4267,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = 
copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -4099,18 +4290,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( @@ -4119,36 +4347,31 @@ def save( line_numbers=line_numbers, key="name", val=r.get("name"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.inVocab is not None: - saved_val = save( + if self.inVocab is not None and "inVocab" not in r: + r["inVocab"] = save( self.inVocab, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["inVocab"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the 
list - saved_val = saved_val[0] - r["inVocab"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="inVocab", val=r.get("inVocab"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris ) - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) r["symbols"] = u max_len = add_kv( old_doc=doc, @@ -4156,61 +4379,48 @@ def save( line_numbers=line_numbers, key="symbols", val=r.get("symbols"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.type is not None: - saved_val = save( + if self.type is not None and "type" not in r: + r["type"] = save( self.type, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["type"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", val=r.get("type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.doc is not None: - saved_val = save( + if self.doc is not None and "doc" not in r: + r["doc"] = save( self.doc, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["doc"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["doc"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", val=r.get("doc"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docParent is not None and "docParent" not in r: + u = save_relative_uri( + 
self.docParent, str(self.name), False, None, relative_uris ) - if self.docParent is not None: - u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u max_len = add_kv( old_doc=doc, @@ -4218,11 +4428,14 @@ def save( line_numbers=line_numbers, key="docParent", val=r.get("docParent"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docChild is not None and "docChild" not in r: + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris ) - if self.docChild is not None: - u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u max_len = add_kv( old_doc=doc, @@ -4230,11 +4443,14 @@ def save( line_numbers=line_numbers, key="docChild", val=r.get("docChild"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docAfter is not None and "docAfter" not in r: + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris ) - if self.docAfter is not None: - u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u max_len = add_kv( old_doc=doc, @@ -4242,61 +4458,46 @@ def save( line_numbers=line_numbers, key="docAfter", val=r.get("docAfter"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.jsonldPredicate is not None: - saved_val = save( + if self.jsonldPredicate is not None and "jsonldPredicate" not in r: + r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["jsonldPredicate"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["jsonldPredicate"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="jsonldPredicate", val=r.get("jsonldPredicate"), - max_len=max_len, 
cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.documentRoot is not None: - saved_val = save( + if self.documentRoot is not None and "documentRoot" not in r: + r["documentRoot"] = save( self.documentRoot, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["documentRoot"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["documentRoot"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="documentRoot", val=r.get("documentRoot"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.extends is not None: - u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) + if self.extends is not None and "extends" not in r: + u = save_relative_uri(self.extends, str(self.name), False, 1, relative_uris) r["extends"] = u max_len = add_kv( old_doc=doc, @@ -4304,8 +4505,9 @@ def save( line_numbers=line_numbers, key="extends", val=r.get("extends"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level @@ -4576,14 +4778,19 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info - for key in keys: + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -4594,18 +4801,55 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = 
get_max_line_num(doc) + min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u max_len = add_kv( @@ -4614,61 +4858,48 @@ def save( line_numbers=line_numbers, key="name", val=r.get("name"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.inVocab is not None: - saved_val = save( + if self.inVocab is not None and "inVocab" not in r: + r["inVocab"] = save( self.inVocab, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["inVocab"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["inVocab"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="inVocab", val=r.get("inVocab"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.doc is not None: - saved_val = save( + if self.doc is not None and "doc" not in r: + 
r["doc"] = save( self.doc, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["doc"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["doc"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="doc", val=r.get("doc"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docParent is not None and "docParent" not in r: + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris ) - if self.docParent is not None: - u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u max_len = add_kv( old_doc=doc, @@ -4676,11 +4907,14 @@ def save( line_numbers=line_numbers, key="docParent", val=r.get("docParent"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docChild is not None and "docChild" not in r: + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris ) - if self.docChild is not None: - u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u max_len = add_kv( old_doc=doc, @@ -4688,11 +4922,14 @@ def save( line_numbers=line_numbers, key="docChild", val=r.get("docChild"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, + ) + if self.docAfter is not None and "docAfter" not in r: + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris ) - if self.docAfter is not None: - u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u max_len = add_kv( old_doc=doc, @@ -4700,33 +4937,26 @@ def save( line_numbers=line_numbers, key="docAfter", val=r.get("docAfter"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) - if self.type is not None: - saved_val = save( + if self.type is 
not None and "type" not in r: + r["type"] = save( self.type, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, - keys=keys + ["type"], ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - r["type"] = saved_val - max_len = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, key="type", val=r.get("type"), - max_len=max_len, cols=cols, + min_col=min_col, + max_len=max_len, ) # top refers to the directory level diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 67ffe3fd9..6e1ff3239 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -270,9 +270,9 @@ def fromDoc( ) self.idfield = idfield - - self.serializer.write( - """ + if "id" in field_names: + self.serializer.write( + """ def save( self, top: bool = False, @@ -283,9 +283,69 @@ def save( if keys is None: keys = [] r = CommentedMap() - doc = doc_line_info + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + doc = None + else: + doc = None + break + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for 
ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] +""" + ) + else: + self.serializer.write( + """ + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + doc = copy.copy(doc_line_info) + keys = copy.copy(keys) + for key in keys: if isinstance(doc, CommentedMap): doc = doc.get(key) elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): @@ -296,13 +356,14 @@ def save( else: doc = None break + if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) line_numbers = get_line_numbers(doc) max_len = get_max_line_num(doc) min_col = get_min_col(line_numbers) cols: Dict[int, int] = {} - skipped = set() + if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] @@ -310,7 +371,7 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] """ - ) + ) if "class" in field_names: self.out.write( @@ -333,40 +394,80 @@ def save( if "id" in field_names: self.serializer.write( """ - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - saved_val = save( - getattr(self, key), - top=False, - base_url=self.id, - relative_uris=relative_uris, - keys=keys + [key], + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if 
type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len + ) +""" + ) + else: + self.serializer.write( + """ + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len ) - - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): # If the returned value is a list of size 1, just save the value in the list - saved_val = saved_val[0] - - r[key] = saved_val - - max_len = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len - ) - - - """ ) # self.serializer.write( @@ -687,7 +788,7 @@ def declare_field( if name == self.idfield or not self.idfield: baseurl = "base_url" else: - baseurl = f"self.{self.safe_name(self.idfield)}" + baseurl = f"str(self.{self.safe_name(self.idfield)})" if fieldtype.is_uri: self.serializer.write( diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 14ee8c100..95780a76c 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -274,16 +274,19 @@ def add_kv( new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] else: + 
line = line_info[0] + while line in inserted_line_info.keys(): + line += 1 new_doc.lc.add_kv_line_col( key, [ - max_line, + line, old_doc.lc.data[key][1], - max_line + (max_line - old_doc.lc.data[key][2]), + line + (line - old_doc.lc.data[key][2]), old_doc.lc.data[key][3], ], ) - inserted_line_info[max_line] = old_doc.lc.data[key][1] + inserted_line_info[line] = old_doc.lc.data[key][1] return max_len elif isinstance(val, (int, float, str)) and not isinstance( val, bool @@ -315,7 +318,7 @@ def add_kv( return max_len elif old_doc: if val in old_doc: - index = old_doc.index(val) + index = old_doc.lc.data.index(val) line_info = old_doc.lc.data[index] if line_info[0] not in inserted_line_info: new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) @@ -338,7 +341,6 @@ def add_kv( key, [max_line, min_col, max_line, min_col + len(key) + 2] ) inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1 @@ -365,7 +367,7 @@ def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: return line_numbers -def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> str: +def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: min_col = 0 for line in line_numbers: if line_numbers[line]["col"] > min_col: @@ -384,7 +386,6 @@ def get_max_line_num(doc: CommentedMap) -> int: cur = doc while isinstance(cur, CommentedMap) and len(cur) > 0: for key in cur.lc.data.keys(): - # print(cur.lc.data[key][2]) if cur.lc.data[key][2] >= max_line: max_line = cur.lc.data[key][2] max_key = key diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 22f5229d7..43d02eced 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -1,83 +1,141 @@ # from parser import load_document_by_uri, save -import os from pathlib import Path -from typing import Any, Dict, List, Optional, cast +from typing import MutableSequence, Optional, cast, Any +from urllib.parse import unquote_plus, urlparse 
-from ruamel.yaml.comments import CommentedMap, CommentedSeq - -from schema_salad import codegen -from schema_salad.avro.schema import Names -from schema_salad.schema import load_schema +import schema_salad.tests.cwl_v1_2 as cwl_v1_2 from schema_salad.utils import yaml_no_ts - - -# def load_yaml(file_name: str) -> CommentedMap: -# assert os.path.isfile(file_name) -# with open(file_name) as f: -# yaml = yaml_no_ts() -# doc = yaml.load(f.read()) -# return doc - - -# def test_line_number_comparision() -> None: - # v0_doc = load_yaml(count_lines["v0"]) - # v1_doc = load_yaml(count_lines["v1"]) - # v2_doc = load_yaml(count_lines["v2"]) - - -def compare_line_numbers(original_doc: CommentedMap, codegen_doc: CommentedMap) -> None: - assert type(original_doc) == CommentedMap - assert type(codegen_doc) == CommentedMap - - assert original_doc.lc.line == codegen_doc.lc.line - assert original_doc.lc.col == codegen_doc.lc.col - - for key, lc_info in original_doc.lc.data.items(): - assert key in codegen_doc.lc.data - assert lc_info == codegen_doc.lc.data[key] - - max_line = get_max_line_number(original_doc) - - for key, lc_info in codegen_doc.lc.data.items(): - if key in original_doc: - continue - assert lc_info == [max_line, 0, max_line, len(key) + 2] - max_line += 1 - - -def get_max_line_number(original_doc: CommentedMap) -> int: - max_key = "" - max_line = 0 - temp_doc = original_doc - while (type(temp_doc) == CommentedMap) and len(temp_doc) > 0: - for key, lc_info in temp_doc.lc.data.items(): - if lc_info[0] >= max_line: - max_line = lc_info[0] - max_key = key - temp_doc = temp_doc[max_key] - return max_line + 1 - - -def python_codegen( - file_uri: str, - target: Path, - parser_info: Optional[str] = None, - package: Optional[str] = None, -) -> None: - document_loader, avsc_names, schema_metadata, metaschema_loader = load_schema( - file_uri - ) - assert isinstance(avsc_names, Names) - schema_raw_doc = metaschema_loader.fetch(file_uri) - schema_doc, schema_metadata = 
metaschema_loader.resolve_all( - schema_raw_doc, file_uri - ) - codegen.codegen( - "python", - cast(List[Dict[str, Any]], schema_doc), - schema_metadata, - document_loader, - target=str(target), - parser_info=parser_info, - package=package, +from ruamel.yaml.comments import CommentedMap + +from .util import get_data + + +def test_secondary_files_dsl() -> None: + """ + Checks object is properly saving when dsl is used + """ + t = "test_schema/test_secondary_files_dsl.cwl" + path = get_data("tests/" + t) + obj = load_document_by_uri(str(path)) + saved_obj = obj.save() + assert isinstance(saved_obj, CommentedMap) + assert saved_obj.lc.data == {'cwlVersion': [1, 0, 1, 12], + 'baseCommand': [2, 0, 2, 13], + 'inputs': [4, 0, 5, 2], + 'outputs': [10, 0, 11, 2], + 'stdout': [19, 0, 21, 8], + 'id': [20, 0, 20, 4] + } + assert saved_obj['inputs'][0].lc.data == {'type': [6, 3, 6, 9], + 'default': [7, 3, 7, 12], + 'id': [5, 2, 5, 6] + } + assert saved_obj['inputs'][0]['type'] == 'File' + assert saved_obj['inputs'][1].lc.data == {'id': [8, 2, 8, 6], 'type': [9, 2, 9, 8]} + assert saved_obj['outputs'][0].lc.data == {'type': [12, 4, 12, 10], + 'secondaryFiles': [16, 4, 19, 20], + 'outputBinding': [18, 4, 21, 6], + 'id': [11, 2, 11, 6] + } + assert saved_obj["outputs"][0]['secondaryFiles'][0].lc.data == {'pattern': [13, 35, 13, 44]} + assert saved_obj["outputs"][0]['secondaryFiles'][1].lc.data == {'pattern': [14, 35, 14, 44], + 'required': [15, 35, 15, 45] + } + + cwl_v1_2.inserted_line_info = {} + + +def test_outputs_before_inputs() -> None: + """ + Tests when output comes in cwl file before inputs + """ + t = "test_schema/test_outputs_before_inputs.cwl" + path = get_data("tests/" + t) + obj = load_document_by_uri(str(path)) + saved_obj = obj.save() + assert isinstance(saved_obj, CommentedMap) + assert saved_obj.lc.data == {'cwlVersion': [1, 0, 1, 12], + 'baseCommand': [2, 0, 2, 13], + 'outputs': [4, 0, 5, 2], + 'inputs': [10, 0, 11, 2], + 'stdout': [16, 0, 16, 8], + 'id': [17, 
0, 17, 4] + } + assert saved_obj['inputs'][0].lc.data == {'type': [12, 3, 12, 9], + 'default': [13, 3, 13, 12], + 'id': [11, 2, 11, 6] + } + assert saved_obj['inputs'][0]['type'] == 'File' + assert saved_obj['inputs'][1].lc.data == {'id': [14, 2, 14, 6], 'type': [15, 2, 15, 8]} + assert saved_obj['outputs'][0].lc.data == {'type': [6, 4, 6, 10], + 'outputBinding': [7, 4, 8, 6], + 'id': [5, 2, 5, 6] + } + cwl_v1_2.inserted_line_info = {} + + +def test_type_dsl() -> None: + """ + Checks object is properly saving when type DSL is used. + In this example, type for the input is File? which should expand to + null, File. + """ + t = "test_schema/test_type_dsl.cwl" + path = get_data("tests/" + t) + obj = load_document_by_uri(str(path)) + saved_obj = obj.save() + assert isinstance(saved_obj, CommentedMap) + assert saved_obj.lc.data == {'cwlVersion': [1, 0, 1, 12], + 'baseCommand': [2, 0, 2, 13], + 'inputs': [4, 0, 5, 2], + 'outputs': [10, 0, 11, 2], + 'stdout': [16, 0, 16, 8], + 'id': [17, 0, 17, 4] + } + assert saved_obj['inputs'][0].lc.data == {'type': [6, 3, 6, 9], + 'default': [7, 3, 7, 12], + 'id': [5, 2, 5, 6] + } + assert saved_obj['inputs'][0]['type'] == ['null', 'File'] + assert saved_obj['inputs'][1].lc.data == {'id': [8, 2, 8, 6], + 'type': [9, 2, 9, 8] + } + assert saved_obj['outputs'][0].lc.data == {'type': [12, 4, 12, 10], + 'outputBinding': [13, 4, 14, 6], + 'id': [11, 2, 11, 6] + } + assert saved_obj["outputs"][0]['outputBinding'].lc.data == {'glob': [14, 6, 14, 12]} + + +def load_document_by_uri(path: str) -> Any: + """ + Takes in a path and loads it via the python codegen. 
+ """ + if isinstance(path, str): + uri = urlparse(path) + if not uri.scheme or uri.scheme == "file": + real_path = Path(unquote_plus(uri.path)).resolve().as_uri() + else: + real_path = path + else: + real_path = path.resolve().as_uri() + + baseuri = str(real_path) + + loadingOptions = cwl_v1_2.LoadingOptions(fileuri=baseuri) + + doc = loadingOptions.fetcher.fetch_text(real_path) + + yaml = yaml_no_ts() + doc = yaml.load(doc) + + result = cwl_v1_2.load_document_by_yaml( + doc, baseuri, cast(Optional[cwl_v1_2.LoadingOptions], loadingOptions) ) + + if isinstance(result, MutableSequence): + lst = [] + for r in result: + lst.append(r) + return lst + return result From 74e32475b475a4524624e4f54d303f8770c6e208 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Thu, 11 May 2023 19:05:46 -0400 Subject: [PATCH 32/44] running make clean-up --- schema_salad/metaschema.py | 100 ++- schema_salad/python_codegen.py | 115 ++-- schema_salad/tests/cwl_v1_0.py | 714 +++++++++++----------- schema_salad/tests/cwl_v1_1.py | 769 +++++++++++------------- schema_salad/tests/test_line_numbers.py | 136 +++-- 5 files changed, 863 insertions(+), 971 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 4a0707dde..309dd920f 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -1256,7 +1256,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1295,7 +1295,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1306,9 +1306,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1321,7 
+1319,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1490,7 +1488,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1529,7 +1527,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1540,9 +1538,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1555,7 +1551,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1742,7 +1738,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1781,7 +1777,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1792,9 +1788,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1807,7 +1801,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1971,7 +1965,7 @@ def save( top: 
bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -2010,7 +2004,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -2021,9 +2015,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2036,7 +2028,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) @@ -2409,7 +2401,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -2448,7 +2440,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -2459,9 +2451,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2474,7 +2464,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) @@ -2778,7 +2768,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -2817,7 +2807,7 @@ def save( if 
isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -2828,9 +2818,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2843,7 +2831,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.specializeFrom is not None and "specializeFrom" not in r: u = save_relative_uri( @@ -3095,7 +3083,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -3134,7 +3122,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -3145,9 +3133,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3160,7 +3146,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3634,7 +3620,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -3673,7 +3659,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -3684,9 +3670,7 @@ def save( # If the returned value is a list 
of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3699,7 +3683,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4271,7 +4255,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -4310,7 +4294,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -4321,9 +4305,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4336,7 +4318,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4782,7 +4764,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -4821,7 +4803,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -4832,9 +4814,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4847,7 +4827,7 @@ def save( 
val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 6e1ff3239..3b6c91c1d 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -470,63 +470,64 @@ def save( ) """ ) -# self.serializer.write( -# """ -# if self.id is not None and "id" not in r: -# u = save_relative_uri(self.id, base_url, True, None, relative_uris) -# r["id"] = u -# add_kv( -# old_doc=doc, -# new_doc=r, -# line_numbers=line_numbers, -# key="id", -# val=r.get("id"), -# cols=cols, -# min_col=min_col, -# max_len=max_len -# ) -# if doc: -# if u in doc: -# keys.append(u) -# if isinstance(doc.get(u), (CommentedMap, CommentedSeq)): -# doc = doc.get(u) -# line_numbers = get_line_numbers(doc) -# min_col = get_min_col(line_numbers) -# """ -# ) -# else: -# self.serializer.write( -# """ -# for key in self.ordered_attrs.keys(): -# if isinstance(key, str) and key not in r: -# if getattr(self, key) is not None: -# saved_val = save( -# getattr(self, key), -# top=False, -# base_url=base_url, -# relative_uris=relative_uris, -# keys=keys + [key], -# ) - -# if type(saved_val) == list: -# if ( -# len(saved_val) == 1 -# ): # If the returned value is a list of size 1, just save the value in the list -# saved_val = saved_val[0] -# r[key] = saved_val - -# add_kv( -# old_doc=doc, -# new_doc=r, -# line_numbers=line_numbers, -# key=key, -# val=r.get(key), -# cols=cols, -# min_col=min_col, -# max_len=max_len -# ) -# """ -# ) + + # self.serializer.write( + # """ + # if self.id is not None and "id" not in r: + # u = save_relative_uri(self.id, base_url, True, None, relative_uris) + # r["id"] = u + # add_kv( + # old_doc=doc, + # new_doc=r, + # line_numbers=line_numbers, + # key="id", + # val=r.get("id"), + # cols=cols, + # min_col=min_col, + # max_len=max_len + # ) + # if doc: + 
# if u in doc: + # keys.append(u) + # if isinstance(doc.get(u), (CommentedMap, CommentedSeq)): + # doc = doc.get(u) + # line_numbers = get_line_numbers(doc) + # min_col = get_min_col(line_numbers) + # """ + # ) + # else: + # self.serializer.write( + # """ + # for key in self.ordered_attrs.keys(): + # if isinstance(key, str) and key not in r: + # if getattr(self, key) is not None: + # saved_val = save( + # getattr(self, key), + # top=False, + # base_url=base_url, + # relative_uris=relative_uris, + # keys=keys + [key], + # ) + + # if type(saved_val) == list: + # if ( + # len(saved_val) == 1 + # ): # If the returned value is a list of size 1, just save the value in the list + # saved_val = saved_val[0] + # r[key] = saved_val + + # add_kv( + # old_doc=doc, + # new_doc=r, + # line_numbers=line_numbers, + # key=key, + # val=r.get(key), + # cols=cols, + # min_col=min_col, + # max_len=max_len + # ) + # """ + # ) def end_class(self, classname: str, field_names: List[str]) -> None: """Signal that we are done with this class.""" diff --git a/schema_salad/tests/cwl_v1_0.py b/schema_salad/tests/cwl_v1_0.py index 51028e9cc..95a007e47 100644 --- a/schema_salad/tests/cwl_v1_0.py +++ b/schema_salad/tests/cwl_v1_0.py @@ -1252,7 +1252,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1291,7 +1291,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1302,9 +1302,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1317,7 +1315,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if 
self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1486,7 +1484,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1525,7 +1523,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1536,9 +1534,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1551,7 +1547,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1703,7 +1699,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1742,7 +1738,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1753,9 +1749,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1768,7 +1762,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1914,7 +1908,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + 
keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1953,7 +1947,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1964,9 +1958,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1979,7 +1971,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -2424,7 +2416,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -2465,7 +2457,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -2476,9 +2468,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2491,7 +2481,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2887,7 +2877,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -2928,7 +2918,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val 
= save( getattr(self, key), top=False, @@ -2939,9 +2929,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2954,7 +2942,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3238,7 +3226,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -3277,7 +3265,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -3288,9 +3276,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3303,7 +3289,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3563,7 +3549,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -3602,7 +3588,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -3613,9 +3599,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - 
len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3628,7 +3612,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3890,7 +3874,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -3929,7 +3913,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -3940,9 +3924,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3955,7 +3937,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4200,7 +4182,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -4239,7 +4221,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -4250,9 +4232,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4265,7 +4245,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) 
if self.items is not None and "items" not in r: r["items"] = save( @@ -4503,7 +4483,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -4542,7 +4522,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -4553,9 +4533,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4568,7 +4546,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4779,7 +4757,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -4818,7 +4796,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -4829,9 +4807,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4844,7 +4820,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -5052,7 +5028,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: 
if keys is None: keys = [] @@ -5091,7 +5067,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -5102,9 +5078,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5117,7 +5091,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -5341,7 +5315,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -5380,7 +5354,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -5391,9 +5365,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5406,7 +5378,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -5769,7 +5741,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -5792,17 +5764,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = 
doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -5827,7 +5804,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -5838,9 +5815,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5853,7 +5828,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6262,7 +6237,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -6285,17 +6260,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = 
temp_doc if doc is not None: @@ -6320,7 +6300,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -6331,9 +6311,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6346,7 +6324,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6611,7 +6589,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -6652,7 +6630,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -6663,9 +6641,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6678,7 +6654,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6812,7 +6788,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -6853,7 +6829,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ 
-6864,9 +6840,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6879,7 +6853,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.types is not None and "types" not in r: r["types"] = save( @@ -7021,7 +6995,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -7060,7 +7034,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -7071,9 +7045,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7086,7 +7058,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7401,7 +7373,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -7440,7 +7412,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -7451,9 +7423,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7466,7 +7436,7 @@ def save( val=r.get(key), cols=cols, 
min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -7750,7 +7720,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -7789,7 +7759,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -7800,9 +7770,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7815,7 +7783,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.glob is not None and "glob" not in r: r["glob"] = save( @@ -8066,7 +8034,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -8105,7 +8073,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -8116,9 +8084,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8131,7 +8097,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -8393,7 +8359,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: 
Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -8432,7 +8398,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -8443,9 +8409,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8458,7 +8422,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -8722,7 +8686,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -8761,7 +8725,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -8772,9 +8736,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8787,7 +8749,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9034,7 +8996,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -9073,7 +9035,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) 
is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -9084,9 +9046,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9099,7 +9059,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -9339,7 +9299,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -9378,7 +9338,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -9389,9 +9349,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9404,7 +9362,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9649,7 +9607,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -9688,7 +9646,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -9699,9 +9657,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - 
len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9714,7 +9670,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9946,7 +9902,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -9985,7 +9941,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -9996,9 +9952,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10011,7 +9965,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -10237,7 +10191,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -10276,7 +10230,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -10287,9 +10241,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10302,7 +10254,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + 
max_len=max_len, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -10669,7 +10621,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -10692,17 +10644,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -10727,7 +10684,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -10738,9 +10695,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10753,7 +10708,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -11191,7 +11146,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -11214,17 +11169,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if 
len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -11249,7 +11209,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -11260,9 +11220,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11275,7 +11233,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -11879,7 +11837,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -11902,17 +11860,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + 
doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -11939,7 +11902,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -11950,9 +11913,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11965,7 +11926,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12517,7 +12478,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -12558,7 +12519,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -12569,9 +12530,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -12584,7 +12543,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -12808,7 +12767,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -12849,7 +12808,7 @@ def save( if isinstance(key, str): if hasattr(self, key): 
if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -12860,9 +12819,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -12875,7 +12832,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -13036,7 +12993,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -13075,7 +13032,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -13086,9 +13043,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13101,7 +13056,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.package is not None and "package" not in r: r["package"] = save( @@ -13297,7 +13252,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -13336,7 +13291,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -13347,9 +13302,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - 
len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13362,7 +13315,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -13519,7 +13472,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -13560,7 +13513,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -13571,9 +13524,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13586,7 +13537,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -13712,7 +13663,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -13753,7 +13704,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -13764,9 +13715,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13779,7 +13728,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.envDef is not None and "envDef" not in r: r["envDef"] = 
save( @@ -13892,7 +13841,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -13933,7 +13882,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -13944,9 +13893,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13959,7 +13906,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level @@ -14261,7 +14208,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -14302,7 +14249,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -14313,9 +14260,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14328,7 +14273,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -14747,7 +14692,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -14770,17 +14715,22 @@ def save( if doc: if self.id: temp_id = self.id - if 
len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -14805,7 +14755,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -14816,9 +14766,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14831,7 +14779,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -15271,7 +15219,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -15294,17 +15242,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + 
temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -15331,7 +15284,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -15342,9 +15295,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15357,7 +15308,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -15850,7 +15801,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -15873,17 +15824,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -15908,7 +15864,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -15919,9 
+15875,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15934,7 +15888,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16354,7 +16308,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -16377,17 +16331,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -16412,7 +16371,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -16423,9 +16382,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16438,7 +16395,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, 
base_url, True, None, relative_uris) @@ -16635,7 +16592,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -16658,17 +16615,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -16693,7 +16655,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -16704,9 +16666,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16719,7 +16679,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17109,7 +17069,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -17132,17 +17092,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = 
self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -17167,7 +17132,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -17178,9 +17143,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17193,7 +17156,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17703,7 +17666,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -17726,17 +17689,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + 
doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -17763,7 +17731,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -17774,9 +17742,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17789,7 +17755,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18040,7 +18006,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -18081,7 +18047,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -18092,9 +18058,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18107,7 +18071,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level @@ -18201,7 +18165,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -18242,7 +18206,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != 
"class": saved_val = save( getattr(self, key), top=False, @@ -18253,9 +18217,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18268,7 +18230,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level @@ -18362,7 +18324,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -18403,7 +18365,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -18414,9 +18376,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18429,7 +18389,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level @@ -18523,7 +18483,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -18564,7 +18524,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -18575,9 +18535,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18590,7 +18548,7 @@ def save( val=r.get(key), 
cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level diff --git a/schema_salad/tests/cwl_v1_1.py b/schema_salad/tests/cwl_v1_1.py index 133ccfb02..c3ad579d5 100644 --- a/schema_salad/tests/cwl_v1_1.py +++ b/schema_salad/tests/cwl_v1_1.py @@ -1256,7 +1256,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1295,7 +1295,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1306,9 +1306,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1321,7 +1319,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1490,7 +1488,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1529,7 +1527,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1540,9 +1538,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1555,7 +1551,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.fields is not None and "fields" 
not in r: r["fields"] = save( @@ -1707,7 +1703,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1746,7 +1742,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1757,9 +1753,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1772,7 +1766,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1918,7 +1912,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -1957,7 +1951,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -1968,9 +1962,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1983,7 +1975,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -2428,7 +2420,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ 
-2469,7 +2461,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -2480,9 +2472,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2495,7 +2485,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2891,7 +2881,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -2932,7 +2922,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -2943,9 +2933,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2958,7 +2946,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3149,7 +3137,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -3188,7 +3176,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, 
key), top=False, @@ -3199,9 +3187,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3214,7 +3200,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3544,7 +3530,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -3583,7 +3569,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -3594,9 +3580,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3609,7 +3593,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3969,7 +3953,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -4008,7 +3992,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -4019,9 +4003,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] 
r[key] = saved_val @@ -4034,7 +4016,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4313,7 +4295,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -4352,7 +4334,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -4363,9 +4345,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4378,7 +4358,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4655,7 +4635,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -4694,7 +4674,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -4705,9 +4685,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4720,7 +4698,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = 
save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5056,7 +5034,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -5095,7 +5073,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -5106,9 +5084,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5121,7 +5097,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5437,7 +5413,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -5476,7 +5452,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -5487,9 +5463,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5502,7 +5476,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5781,7 +5755,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: 
Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -5820,7 +5794,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -5831,9 +5805,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5846,7 +5818,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6123,7 +6095,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -6162,7 +6134,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -6173,9 +6145,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6188,7 +6158,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6424,7 +6394,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -6465,7 +6435,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + 
if key != "class": saved_val = save( getattr(self, key), top=False, @@ -6476,9 +6446,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6491,7 +6459,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6629,7 +6597,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -6670,7 +6638,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -6681,9 +6649,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6696,7 +6662,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.types is not None and "types" not in r: r["types"] = save( @@ -6834,7 +6800,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -6873,7 +6839,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -6884,9 +6850,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = 
saved_val[0] r[key] = saved_val @@ -6899,7 +6863,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.pattern is not None and "pattern" not in r: r["pattern"] = save( @@ -7046,7 +7010,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -7087,7 +7051,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -7098,9 +7062,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7113,7 +7075,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -7258,7 +7220,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -7297,7 +7259,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -7308,9 +7270,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7323,7 +7283,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7638,7 +7598,7 @@ def save( top: bool = False, base_url: str 
= "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -7677,7 +7637,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -7688,9 +7648,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7703,7 +7661,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8009,7 +7967,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -8048,7 +8006,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -8059,9 +8017,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8074,7 +8030,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8243,7 +8199,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -8282,7 +8238,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - 
if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -8293,9 +8249,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8308,7 +8262,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8651,7 +8605,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -8690,7 +8644,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -8701,9 +8655,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8716,7 +8668,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9122,7 +9074,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -9161,7 +9113,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -9172,9 +9124,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - 
len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9187,7 +9137,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9516,7 +9466,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -9555,7 +9505,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -9566,9 +9516,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9581,7 +9529,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9903,7 +9851,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -9942,7 +9890,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -9953,9 +9901,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9968,7 +9914,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) 
if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10346,7 +10292,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -10385,7 +10331,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -10396,9 +10342,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10411,7 +10355,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10755,7 +10699,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -10794,7 +10738,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -10805,9 +10749,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10820,7 +10762,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11101,7 +11043,7 @@ def save( top: bool = False, base_url: str = "", 
relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -11140,7 +11082,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -11151,9 +11093,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11166,7 +11106,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11445,7 +11385,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -11484,7 +11424,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -11495,9 +11435,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11510,7 +11448,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11942,7 +11880,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -11965,17 +11903,22 @@ def save( if doc: if 
self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -12000,7 +11943,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -12011,9 +11954,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -12026,7 +11967,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12497,7 +12438,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -12520,17 +12461,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + 
temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -12555,7 +12501,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -12566,9 +12512,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -12581,7 +12525,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13185,7 +13129,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -13208,17 +13152,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -13245,7 +13194,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), 
top=False, @@ -13256,9 +13205,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13271,7 +13218,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13841,7 +13788,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -13882,7 +13829,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -13893,9 +13840,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13908,7 +13853,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -14132,7 +14077,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -14173,7 +14118,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -14184,9 +14129,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] 
r[key] = saved_val @@ -14199,7 +14142,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -14360,7 +14303,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -14399,7 +14342,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -14410,9 +14353,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14425,7 +14366,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.package is not None and "package" not in r: r["package"] = save( @@ -14621,7 +14562,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -14660,7 +14601,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -14671,9 +14612,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14686,7 +14625,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -14843,7 +14782,7 @@ def save( top: bool = False, base_url: 
str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -14884,7 +14823,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -14895,9 +14834,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14910,7 +14847,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -15036,7 +14973,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -15077,7 +15014,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -15088,9 +15025,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15103,7 +15038,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -15216,7 +15151,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -15257,7 +15192,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 
'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -15268,9 +15203,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15283,7 +15216,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level @@ -15585,7 +15518,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -15626,7 +15559,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -15637,9 +15570,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15652,7 +15583,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -15909,7 +15840,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -15950,7 +15881,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -15961,9 +15892,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = 
saved_val @@ -15976,7 +15905,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.enableReuse is not None and "enableReuse" not in r: r["enableReuse"] = save( @@ -16121,7 +16050,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -16162,7 +16091,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -16173,9 +16102,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16188,7 +16115,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.networkAccess is not None and "networkAccess" not in r: r["networkAccess"] = save( @@ -16350,7 +16277,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -16391,7 +16318,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -16402,9 +16329,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16417,7 +16342,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.inplaceUpdate is not None and "inplaceUpdate" not in r: r["inplaceUpdate"] = save( @@ -16553,7 +16478,7 @@ def save( 
top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -16594,7 +16519,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -16605,9 +16530,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16620,7 +16543,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.timelimit is not None and "timelimit" not in r: r["timelimit"] = save( @@ -16894,7 +16817,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -16917,17 +16840,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -16952,7 +16880,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -16963,9 +16891,7 @@ def save( # If the returned value is 
a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16978,7 +16904,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17434,7 +17360,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -17457,17 +17383,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -17492,7 +17423,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -17503,9 +17434,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17518,7 +17447,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18018,7 
+17947,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -18041,17 +17970,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -18078,7 +18012,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -18089,9 +18023,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18104,7 +18036,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18572,7 +18504,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -18595,17 +18527,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) 
temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -18630,7 +18567,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -18641,9 +18578,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18656,7 +18591,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19137,7 +19072,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -19160,17 +19095,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 
4, + ], + ) doc = temp_doc if doc is not None: @@ -19195,7 +19135,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -19206,9 +19146,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19221,7 +19159,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19484,7 +19422,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -19507,17 +19445,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -19542,7 +19485,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -19553,9 +19496,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 
1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19568,7 +19509,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19865,7 +19806,7 @@ def fromDoc( else: hints = None - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + subscope_baseuri = expand_url("run", baseuri, loadingOptions, True) try: run = load_field( _doc.get("run"), @@ -19960,7 +19901,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -19983,17 +19924,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -20018,7 +19964,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -20029,9 +19975,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -20044,7 +19988,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + 
max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20554,7 +20498,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -20577,17 +20521,22 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) + temp_doc["id"] = temp_id + temp_doc.lc.add_kv_line_col( + "id", + [ + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4, + ], + ) doc = temp_doc if doc is not None: @@ -20614,7 +20563,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -20625,9 +20574,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -20640,7 +20587,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20891,7 +20838,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -20932,7 +20879,7 @@ def save( if isinstance(key, str): if 
hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -20943,9 +20890,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -20958,7 +20903,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level @@ -21052,7 +20997,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -21093,7 +21038,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -21104,9 +21049,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -21119,7 +21062,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level @@ -21213,7 +21156,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -21254,7 +21197,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -21265,9 +21208,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val 
= saved_val[0] r[key] = saved_val @@ -21280,7 +21221,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level @@ -21374,7 +21315,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, ) -> CommentedMap: if keys is None: keys = [] @@ -21415,7 +21356,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": saved_val = save( getattr(self, key), top=False, @@ -21426,9 +21367,7 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -21441,7 +21380,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, ) # top refers to the directory level diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 43d02eced..567afd5a9 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -1,11 +1,12 @@ # from parser import load_document_by_uri, save from pathlib import Path -from typing import MutableSequence, Optional, cast, Any +from typing import Any, MutableSequence, Optional, cast from urllib.parse import unquote_plus, urlparse +from ruamel.yaml.comments import CommentedMap + import schema_salad.tests.cwl_v1_2 as cwl_v1_2 from schema_salad.utils import yaml_no_ts -from ruamel.yaml.comments import CommentedMap from .util import get_data @@ -19,28 +20,34 @@ def test_secondary_files_dsl() -> None: obj = load_document_by_uri(str(path)) saved_obj = obj.save() assert isinstance(saved_obj, CommentedMap) - assert saved_obj.lc.data == {'cwlVersion': [1, 0, 1, 12], - 'baseCommand': [2, 0, 2, 13], - 'inputs': [4, 0, 5, 2], - 'outputs': [10, 0, 
11, 2], - 'stdout': [19, 0, 21, 8], - 'id': [20, 0, 20, 4] - } - assert saved_obj['inputs'][0].lc.data == {'type': [6, 3, 6, 9], - 'default': [7, 3, 7, 12], - 'id': [5, 2, 5, 6] - } - assert saved_obj['inputs'][0]['type'] == 'File' - assert saved_obj['inputs'][1].lc.data == {'id': [8, 2, 8, 6], 'type': [9, 2, 9, 8]} - assert saved_obj['outputs'][0].lc.data == {'type': [12, 4, 12, 10], - 'secondaryFiles': [16, 4, 19, 20], - 'outputBinding': [18, 4, 21, 6], - 'id': [11, 2, 11, 6] - } - assert saved_obj["outputs"][0]['secondaryFiles'][0].lc.data == {'pattern': [13, 35, 13, 44]} - assert saved_obj["outputs"][0]['secondaryFiles'][1].lc.data == {'pattern': [14, 35, 14, 44], - 'required': [15, 35, 15, 45] - } + assert saved_obj.lc.data == { + "cwlVersion": [1, 0, 1, 12], + "baseCommand": [2, 0, 2, 13], + "inputs": [4, 0, 5, 2], + "outputs": [10, 0, 11, 2], + "stdout": [19, 0, 21, 8], + "id": [20, 0, 20, 4], + } + assert saved_obj["inputs"][0].lc.data == { + "type": [6, 3, 6, 9], + "default": [7, 3, 7, 12], + "id": [5, 2, 5, 6], + } + assert saved_obj["inputs"][0]["type"] == "File" + assert saved_obj["inputs"][1].lc.data == {"id": [8, 2, 8, 6], "type": [9, 2, 9, 8]} + assert saved_obj["outputs"][0].lc.data == { + "type": [12, 4, 12, 10], + "secondaryFiles": [16, 4, 19, 20], + "outputBinding": [18, 4, 21, 6], + "id": [11, 2, 11, 6], + } + assert saved_obj["outputs"][0]["secondaryFiles"][0].lc.data == { + "pattern": [13, 35, 13, 44] + } + assert saved_obj["outputs"][0]["secondaryFiles"][1].lc.data == { + "pattern": [14, 35, 14, 44], + "required": [15, 35, 15, 45], + } cwl_v1_2.inserted_line_info = {} @@ -54,23 +61,29 @@ def test_outputs_before_inputs() -> None: obj = load_document_by_uri(str(path)) saved_obj = obj.save() assert isinstance(saved_obj, CommentedMap) - assert saved_obj.lc.data == {'cwlVersion': [1, 0, 1, 12], - 'baseCommand': [2, 0, 2, 13], - 'outputs': [4, 0, 5, 2], - 'inputs': [10, 0, 11, 2], - 'stdout': [16, 0, 16, 8], - 'id': [17, 0, 17, 4] - } - assert 
saved_obj['inputs'][0].lc.data == {'type': [12, 3, 12, 9], - 'default': [13, 3, 13, 12], - 'id': [11, 2, 11, 6] - } - assert saved_obj['inputs'][0]['type'] == 'File' - assert saved_obj['inputs'][1].lc.data == {'id': [14, 2, 14, 6], 'type': [15, 2, 15, 8]} - assert saved_obj['outputs'][0].lc.data == {'type': [6, 4, 6, 10], - 'outputBinding': [7, 4, 8, 6], - 'id': [5, 2, 5, 6] - } + assert saved_obj.lc.data == { + "cwlVersion": [1, 0, 1, 12], + "baseCommand": [2, 0, 2, 13], + "outputs": [4, 0, 5, 2], + "inputs": [10, 0, 11, 2], + "stdout": [16, 0, 16, 8], + "id": [17, 0, 17, 4], + } + assert saved_obj["inputs"][0].lc.data == { + "type": [12, 3, 12, 9], + "default": [13, 3, 13, 12], + "id": [11, 2, 11, 6], + } + assert saved_obj["inputs"][0]["type"] == "File" + assert saved_obj["inputs"][1].lc.data == { + "id": [14, 2, 14, 6], + "type": [15, 2, 15, 8], + } + assert saved_obj["outputs"][0].lc.data == { + "type": [6, 4, 6, 10], + "outputBinding": [7, 4, 8, 6], + "id": [5, 2, 5, 6], + } cwl_v1_2.inserted_line_info = {} @@ -85,26 +98,27 @@ def test_type_dsl() -> None: obj = load_document_by_uri(str(path)) saved_obj = obj.save() assert isinstance(saved_obj, CommentedMap) - assert saved_obj.lc.data == {'cwlVersion': [1, 0, 1, 12], - 'baseCommand': [2, 0, 2, 13], - 'inputs': [4, 0, 5, 2], - 'outputs': [10, 0, 11, 2], - 'stdout': [16, 0, 16, 8], - 'id': [17, 0, 17, 4] - } - assert saved_obj['inputs'][0].lc.data == {'type': [6, 3, 6, 9], - 'default': [7, 3, 7, 12], - 'id': [5, 2, 5, 6] - } - assert saved_obj['inputs'][0]['type'] == ['null', 'File'] - assert saved_obj['inputs'][1].lc.data == {'id': [8, 2, 8, 6], - 'type': [9, 2, 9, 8] - } - assert saved_obj['outputs'][0].lc.data == {'type': [12, 4, 12, 10], - 'outputBinding': [13, 4, 14, 6], - 'id': [11, 2, 11, 6] - } - assert saved_obj["outputs"][0]['outputBinding'].lc.data == {'glob': [14, 6, 14, 12]} + assert saved_obj.lc.data == { + "cwlVersion": [1, 0, 1, 12], + "baseCommand": [2, 0, 2, 13], + "inputs": [4, 0, 5, 2], + 
"outputs": [10, 0, 11, 2], + "stdout": [16, 0, 16, 8], + "id": [17, 0, 17, 4], + } + assert saved_obj["inputs"][0].lc.data == { + "type": [6, 3, 6, 9], + "default": [7, 3, 7, 12], + "id": [5, 2, 5, 6], + } + assert saved_obj["inputs"][0]["type"] == ["null", "File"] + assert saved_obj["inputs"][1].lc.data == {"id": [8, 2, 8, 6], "type": [9, 2, 9, 8]} + assert saved_obj["outputs"][0].lc.data == { + "type": [12, 4, 12, 10], + "outputBinding": [13, 4, 14, 6], + "id": [11, 2, 11, 6], + } + assert saved_obj["outputs"][0]["outputBinding"].lc.data == {"glob": [14, 6, 14, 12]} def load_document_by_uri(path: str) -> Any: From 752dbabdf3ccb901de77ca6df9e83718f6fbfd30 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 15 May 2023 10:39:58 -0400 Subject: [PATCH 33/44] trying to pass tox tests --- schema_salad/metaschema.py | 109 ++++++++++-------- schema_salad/python_codegen_support.py | 3 +- schema_salad/tests/test_line_numbers.py | 6 +- .../tests/test_outputs_before_inputs.cwl | 17 +++ .../tests/test_secondary_files_dsl.cwl | 18 +++ schema_salad/tests/test_type_dsl.cwl | 17 +++ 6 files changed, 118 insertions(+), 52 deletions(-) create mode 100644 schema_salad/tests/test_outputs_before_inputs.cwl create mode 100644 schema_salad/tests/test_secondary_files_dsl.cwl create mode 100644 schema_salad/tests/test_type_dsl.cwl diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 62a517294..cb7bae42d 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -441,6 +441,7 @@ def save( newdict = CommentedMap() new_keys = keys for key in val: + if doc: if key in doc: newdict.lc.add_kv_line_col(key, doc.lc.data[key]) @@ -453,13 +454,8 @@ def save( relative_uris=relative_uris, keys=new_keys, ) + return newdict - # newdict = {} - # for key in val: - # newdict[key] = save( - # val[key], top=False, base_url=base_url, relative_uris=relative_uris - # ) - # return newdict if val is None or isinstance(val, (int, float, bool, str)): return val raise 
Exception("Not Saveable: %s" % type(val)) @@ -915,7 +911,6 @@ def _document_load( addl_metadata=addl_metadata, ) - doc = copy.copy(doc) if "$namespaces" in doc: doc.pop("$namespaces") @@ -1215,7 +1210,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -1254,7 +1249,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -1265,7 +1260,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1278,7 +1275,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1447,7 +1444,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -1486,7 +1483,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -1497,7 +1494,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1510,7 +1509,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1697,7 +1696,7 @@ def save( top: bool = False, base_url: str = "", 
relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -1736,7 +1735,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -1747,7 +1746,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1760,7 +1761,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1924,7 +1925,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -1963,7 +1964,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -1974,7 +1975,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1987,7 +1990,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) @@ -2360,7 +2363,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -2399,7 +2402,7 @@ def save( if isinstance(key, str): if 
hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -2410,7 +2413,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2423,7 +2428,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) @@ -2727,7 +2732,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -2766,7 +2771,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -2777,7 +2782,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2790,7 +2797,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.specializeFrom is not None and "specializeFrom" not in r: u = save_relative_uri( @@ -3042,7 +3049,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -3081,7 +3088,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -3092,7 +3099,9 @@ def save( # If the returned value is a list of size 1, just save the 
value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3105,7 +3114,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3579,7 +3588,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -3618,7 +3627,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -3629,7 +3638,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3642,7 +3653,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4214,7 +4225,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -4253,7 +4264,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -4264,7 +4275,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4277,7 +4290,7 @@ def save( val=r.get(key), cols=cols, 
min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4723,7 +4736,7 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None, + keys: Optional[List[Any]] = None ) -> CommentedMap: if keys is None: keys = [] @@ -4762,7 +4775,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, @@ -4773,7 +4786,9 @@ def save( # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4786,7 +4801,7 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len, + max_len=max_len ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index d87bf8fad..b574118d5 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -433,7 +433,7 @@ def save( ) ) return r - + if isinstance(val, MutableMapping): newdict = CommentedMap() new_keys = keys @@ -908,7 +908,6 @@ def _document_load( addl_metadata=addl_metadata, ) - doc = copy.copy(doc) if "$namespaces" in doc: doc.pop("$namespaces") diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 567afd5a9..3d40689dc 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -15,7 +15,7 @@ def test_secondary_files_dsl() -> None: """ Checks object is properly saving when dsl is used """ - t = "test_schema/test_secondary_files_dsl.cwl" + t = "test_secondary_files_dsl.cwl" path = get_data("tests/" + 
t) obj = load_document_by_uri(str(path)) saved_obj = obj.save() @@ -56,7 +56,7 @@ def test_outputs_before_inputs() -> None: """ Tests when output comes in cwl file before inputs """ - t = "test_schema/test_outputs_before_inputs.cwl" + t = "test_outputs_before_inputs.cwl" path = get_data("tests/" + t) obj = load_document_by_uri(str(path)) saved_obj = obj.save() @@ -93,7 +93,7 @@ def test_type_dsl() -> None: In this example, type for the input is File? which should expand to null, File. """ - t = "test_schema/test_type_dsl.cwl" + t = "test_type_dsl.cwl" path = get_data("tests/" + t) obj = load_document_by_uri(str(path)) saved_obj = obj.save() diff --git a/schema_salad/tests/test_outputs_before_inputs.cwl b/schema_salad/tests/test_outputs_before_inputs.cwl new file mode 100644 index 000000000..e1594c790 --- /dev/null +++ b/schema_salad/tests/test_outputs_before_inputs.cwl @@ -0,0 +1,17 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +outputs: + hello_output: + type: File + outputBinding: + glob: hello-out.txt + +inputs: + files: + type: File + default: "script.py" + other_file: File + +stdout: hello-out.txt \ No newline at end of file diff --git a/schema_salad/tests/test_secondary_files_dsl.cwl b/schema_salad/tests/test_secondary_files_dsl.cwl new file mode 100644 index 000000000..1f6c712a4 --- /dev/null +++ b/schema_salad/tests/test_secondary_files_dsl.cwl @@ -0,0 +1,18 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +inputs: + files: + type: File + default: "script.py" + other_file: File + +outputs: + hello_output: + type: File + secondaryFiles: ["inputB.txt", "inputC.txt?"] + outputBinding: + glob: hello-out.txt + +stdout: hello-out.txt diff --git a/schema_salad/tests/test_type_dsl.cwl b/schema_salad/tests/test_type_dsl.cwl new file mode 100644 index 000000000..5b822d812 --- /dev/null +++ b/schema_salad/tests/test_type_dsl.cwl @@ -0,0 +1,17 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +inputs: + 
files: + type: File? + default: "script.py" + other_file: File + +outputs: + hello_output: + type: File + outputBinding: + glob: hello-out.txt + +stdout: hello-out.txt From 5d198eec1ee009ea54d8844feae095be5965fae1 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 15 May 2023 16:53:35 -0400 Subject: [PATCH 34/44] updating to remove inserted_line_info from global variable --- schema_salad/python_codegen.py | 32 +++++++++++++++++--------- schema_salad/python_codegen_support.py | 9 ++++---- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 49ffe6a0b..614f22369 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -274,13 +274,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -333,13 +335,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -405,6 +409,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -416,7 +421,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -424,7 
+429,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) """ ) @@ -443,6 +449,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -454,7 +461,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -462,7 +469,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) """ ) @@ -800,7 +808,7 @@ def declare_field( if self.{safename} is not None and "{fieldname}" not in r: u = save_relative_uri(self.{safename}, {baseurl}, {scoped_id}, {ref_scope}, relative_uris) r["{fieldname}"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -808,7 +816,8 @@ def declare_field( val=r.get("{fieldname}"), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) """.format( safename=self.safe_name(name), @@ -826,9 +835,9 @@ def declare_field( """ if self.{safename} is not None and "{fieldname}" not in r: r["{fieldname}"] = save( - self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris + self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris,inserted_line_info=inserted_line_info ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -836,7 +845,8 @@ def declare_field( val=r.get("{fieldname}"), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) """.format( safename=self.safe_name(name), diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index b574118d5..8c939c5c0 100644 --- 
a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -42,10 +42,7 @@ IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] - doc_line_info = CommentedMap() -inserted_line_info: Dict[int, int] = {} - class LoadingOptions: idx: IdxType @@ -245,6 +242,7 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, + inserted_line_info: Dict[int, int] = {} ) -> int: """Add key value pair into Commented Map. @@ -389,6 +387,7 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> save_type: """Save a val of any type. @@ -412,7 +411,7 @@ def save( if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -430,6 +429,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info ) ) return r @@ -450,6 +450,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info, ) return newdict From 160f5594760a85a44d7bf607fd74a9772be5e594 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 15 May 2023 16:54:21 -0400 Subject: [PATCH 35/44] updating cwl codegen filesfor updated codegen --- schema_salad/metaschema.py | 79 +- schema_salad/tests/cwl_v1_0.py | 2692 ++++++++++++++------- schema_salad/tests/cwl_v1_1.py | 2919 +++++++++++++++-------- schema_salad/tests/cwl_v1_2.py | 2862 +++++++++++++++------- schema_salad/tests/test_line_numbers.py | 3 - 5 files changed, 5783 insertions(+), 2772 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index cb7bae42d..0717aeac5 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -45,10 +45,7 @@ IdxType = MutableMapping[str, 
Tuple[Any, "LoadingOptions"]] - doc_line_info = CommentedMap() -inserted_line_info: Dict[int, int] = {} - class LoadingOptions: idx: IdxType @@ -248,6 +245,7 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, + inserted_line_info: Dict[int, int] = {} ) -> int: """Add key value pair into Commented Map. @@ -392,6 +390,7 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> save_type: """Save a val of any type. @@ -415,7 +414,7 @@ def save( if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -433,6 +432,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info ) ) return r @@ -453,6 +453,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info, ) return newdict @@ -1210,7 +1211,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -1256,6 +1258,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -1275,7 +1278,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1444,7 +1448,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: 
Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -1490,6 +1495,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -1509,7 +1515,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1696,7 +1703,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -1742,6 +1750,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -1761,7 +1770,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1925,7 +1935,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -1971,6 +1982,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -1990,7 +2002,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.items is not None 
and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) @@ -2363,7 +2376,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -2409,6 +2423,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -2428,7 +2443,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) @@ -2732,7 +2748,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -2778,6 +2795,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -2797,7 +2815,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.specializeFrom is not None and "specializeFrom" not in r: u = save_relative_uri( @@ -3049,7 +3068,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -3095,6 +3115,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, 
just save the value in the list @@ -3114,7 +3135,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3588,7 +3610,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -3634,6 +3657,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -3653,7 +3677,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4225,7 +4250,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -4271,6 +4297,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -4290,7 +4317,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4736,7 +4764,8 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, 
int] = {} ) -> CommentedMap: if keys is None: keys = [] @@ -4782,6 +4811,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -4801,7 +4831,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) diff --git a/schema_salad/tests/cwl_v1_0.py b/schema_salad/tests/cwl_v1_0.py index 95a007e47..d1f94fccd 100644 --- a/schema_salad/tests/cwl_v1_0.py +++ b/schema_salad/tests/cwl_v1_0.py @@ -45,10 +45,7 @@ IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] - doc_line_info = CommentedMap() -inserted_line_info: Dict[int, int] = {} - class LoadingOptions: idx: IdxType @@ -138,9 +135,7 @@ def __init__( ) self.fetcher: Fetcher = DefaultFetcher({}, session) - self.cache = ( - self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - ) + self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} self.vocab = _vocab self.rvocab = _rvocab @@ -172,9 +167,7 @@ def graph(self) -> Graph: try: content = self.fetcher.fetch_text(fetchurl) except Exception as e: - _logger.warning( - "Could not load extension schema %s: %s", fetchurl, str(e) - ) + _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) continue newGraph = Graph() err_msg = "unknown error" @@ -187,9 +180,7 @@ def graph(self) -> Graph: except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: err_msg = str(e) else: - _logger.warning( - "Could not load extension schema %s: %s", fetchurl, err_msg - ) + _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) self.cache[key] = graph return graph @@ -233,20 +224,16 @@ def load_field(val, fieldtype, baseuri, loadingOptions): ) 
loadingOptions.imports.append(url) return result - elif "$include" in val: + if "$include" in val: if loadingOptions.fileuri is None: raise SchemaSaladException("Cannot load $import without fileuri") - url = loadingOptions.fetcher.urljoin( - loadingOptions.fileuri, val["$include"] - ) + url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) val = loadingOptions.fetcher.fetch_text(url) loadingOptions.includes.append(url) return fieldtype.load(val, baseuri, loadingOptions) -save_type = Optional[ - Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] -] +save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] def add_kv( @@ -258,6 +245,7 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, + inserted_line_info: Dict[int, int] = {} ) -> int: """Add key value pair into Commented Map. @@ -402,6 +390,7 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> save_type: """Save a val of any type. 
@@ -425,7 +414,7 @@ def save( if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -443,17 +432,16 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info ) ) return r - # return [ - # save(v, top=False, base_url=base_url, relative_uris=relative_uris) - # for v in val - # ] + if isinstance(val, MutableMapping): newdict = CommentedMap() new_keys = keys for key in val: + if doc: if key in doc: newdict.lc.add_kv_line_col(key, doc.lc.data[key]) @@ -465,14 +453,10 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info, ) + return newdict - # newdict = {} - # for key in val: - # newdict[key] = save( - # val[key], top=False, base_url=base_url, relative_uris=relative_uris - # ) - # return newdict if val is None or isinstance(val, (int, float, bool, str)): return val raise Exception("Not Saveable: %s" % type(val)) @@ -529,10 +513,7 @@ def expand_url( split = urlsplit(url) if ( - ( - bool(split.scheme) - and split.scheme in loadingOptions.fetcher.supported_schemes() - ) + (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) or url.startswith("$(") or url.startswith("${") ): @@ -572,7 +553,7 @@ def expand_url( if url in loadingOptions.rvocab: return loadingOptions.rvocab[url] else: - raise ValidationException(f"Term '{url}' not in vocabulary") + raise ValidationException(f"Term {url!r} not in vocabulary") return url @@ -623,9 +604,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): errors = [] # type: List[SchemaSaladException] for i in range(0, len(doc)): try: - lf = load_field( - doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions - ) + lf = load_field(doc[i], 
_UnionLoader((self, self.items)), baseuri, loadingOptions) if isinstance(lf, MutableSequence): r.extend(lf) else: @@ -649,8 +628,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): # type: (Any, str, LoadingOptions, Optional[str]) -> Any if doc in self.symbols: return doc - else: - raise ValidationException(f"Expected one of {self.symbols}") + raise ValidationException(f"Expected one of {self.symbols}") def __repr__(self): # type: () -> str return self.name @@ -678,9 +656,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): new_dict["pattern"] = dict_copy.pop("pattern") else: raise ValidationException( - "Missing pattern in secondaryFiles specification entry: {}".format( - d - ) + f"Missing pattern in secondaryFiles specification entry: {d}" ) new_dict["required"] = ( dict_copy.pop("required") if "required" in dict_copy else None @@ -705,19 +681,13 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): new_dict["pattern"] = doc_copy.pop("pattern") else: raise ValidationException( - "Missing pattern in secondaryFiles specification entry: {}".format( - doc - ) + f"Missing pattern in secondaryFiles specification entry: {doc}" ) - new_dict["required"] = ( - doc_copy.pop("required") if "required" in doc_copy else None - ) + new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None if len(doc_copy): raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - doc_copy - ) + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" ) r.append(new_dict) @@ -833,9 +803,7 @@ def resolve( if m: group1 = m.group(1) assert group1 is not None # nosec - first = expand_url( - group1, baseuri, loadingOptions, False, True, self.refScope - ) + first = expand_url(group1, baseuri, loadingOptions, False, True, self.refScope) second = third = None if bool(m.group(2)): second = {"type": "array", "items": first} @@ -944,11 +912,6 @@ def _document_load( addl_metadata=addl_metadata, 
) - # doc = { - # k: v - # for k, v in doc.items() - # if k not in ("$namespaces", "$schemas", "$base") - # } doc = copy.copy(doc) if "$namespaces" in doc: doc.pop("$namespaces") @@ -1000,10 +963,7 @@ def _document_load_by_url( doc_url, frg = urldefrag(url) text = loadingOptions.fetcher.fetch_text(doc_url) - if isinstance(text, bytes): - textIO = StringIO(text.decode("utf-8")) - else: - textIO = StringIO(text) + textIO = StringIO(text) textIO.name = str(doc_url) yaml = yaml_no_ts() result = yaml.load(textIO) @@ -1034,8 +994,7 @@ def file_uri(path, split_frag=False): # type: (str, bool) -> str frag = "" if urlpath.startswith("//"): return f"file:{urlpath}{frag}" - else: - return f"file://{urlpath}{frag}" + return f"file://{urlpath}{frag}" def prefix_url(url: str, namespaces: Dict[str, str]) -> str: @@ -1055,10 +1014,7 @@ def save_relative_uri( ) -> Any: """Convert any URI to a relative one, obeying the scoping rules.""" if isinstance(uri, MutableSequence): - return [ - save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) - for u in uri - ] + return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] elif isinstance(uri, str): if not relative_uris or uri == base_url: return uri @@ -1082,8 +1038,7 @@ def save_relative_uri( if urisplit.fragment.startswith(basefrag): return urisplit.fragment[len(basefrag) :] - else: - return urisplit.fragment + return urisplit.fragment return uri else: return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) @@ -1167,7 +1122,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -1194,7 +1149,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -1211,7 
+1166,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1253,12 +1208,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1291,23 +1248,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1316,11 +1276,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1329,6 +1290,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -1336,8 +1298,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -1346,6 +1309,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1353,8 +1317,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1363,6 +1328,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1428,7 +1394,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -1445,7 +1411,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1485,12 +1451,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1523,23 +1491,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = 
saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1548,12 +1519,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.fields is not None and "fields" not in r: r["fields"] = save( - self.fields, top=False, base_url=base_url, relative_uris=relative_uris + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1562,12 +1538,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1576,6 +1557,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1645,7 +1627,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -1660,7 +1642,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1700,12 +1682,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = 
copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1738,23 +1722,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1763,11 +1750,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1776,12 +1764,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1790,6 +1783,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1854,7 +1848,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", 
str), [e], ) @@ -1869,7 +1863,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1909,12 +1903,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1947,23 +1943,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1972,12 +1971,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.items is not None and "items" not in r: r["items"] = save( - self.items, top=False, base_url=base_url, relative_uris=relative_uris + self.items, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1986,12 +1990,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, 
relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2000,6 +2009,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -2186,7 +2196,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `location` field is not valid because:", + "the 'location' field is not valid because:", SourceLine(_doc, "location", str), [e], ) @@ -2204,7 +2214,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `path` field is not valid because:", + "the 'path' field is not valid because:", SourceLine(_doc, "path", str), [e], ) @@ -2222,7 +2232,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `basename` field is not valid because:", + "the 'basename' field is not valid because:", SourceLine(_doc, "basename", str), [e], ) @@ -2240,7 +2250,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dirname` field is not valid because:", + "the 'dirname' field is not valid because:", SourceLine(_doc, "dirname", str), [e], ) @@ -2258,7 +2268,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `nameroot` field is not valid because:", + "the 'nameroot' field is not valid because:", SourceLine(_doc, "nameroot", str), [e], ) @@ -2276,7 +2286,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `nameext` field is not valid because:", + "the 'nameext' field is not valid because:", SourceLine(_doc, "nameext", str), [e], ) @@ -2294,7 +2304,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `checksum` field 
is not valid because:", + "the 'checksum' field is not valid because:", SourceLine(_doc, "checksum", str), [e], ) @@ -2312,7 +2322,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `size` field is not valid because:", + "the 'size' field is not valid because:", SourceLine(_doc, "size", str), [e], ) @@ -2330,7 +2340,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -2348,7 +2358,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -2366,7 +2376,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `contents` field is not valid because:", + "the 'contents' field is not valid because:", SourceLine(_doc, "contents", str), [e], ) @@ -2417,12 +2427,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -2457,23 +2469,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = 
saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2482,11 +2497,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) r["location"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2495,11 +2511,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) r["path"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2508,12 +2525,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.basename is not None and "basename" not in r: r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2522,12 +2544,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dirname is not None and "dirname" not in r: r["dirname"] = save( - self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + self.dirname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2536,12 +2563,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) 
if self.nameroot is not None and "nameroot" not in r: r["nameroot"] = save( - self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + self.nameroot, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2550,12 +2582,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.nameext is not None and "nameext" not in r: r["nameext"] = save( - self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + self.nameext, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2564,12 +2601,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.checksum is not None and "checksum" not in r: r["checksum"] = save( - self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + self.checksum, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2578,12 +2620,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.size is not None and "size" not in r: r["size"] = save( - self.size, top=False, base_url=base_url, relative_uris=relative_uris + self.size, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2592,6 +2639,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -2599,8 +2647,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2609,11 +2658,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, base_url, True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2622,12 +2672,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.contents is not None and "contents" not in r: r["contents"] = save( - self.contents, top=False, base_url=base_url, relative_uris=relative_uris + self.contents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2636,6 +2691,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -2780,7 +2836,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `location` field is not valid because:", + "the 'location' field is not valid because:", SourceLine(_doc, "location", str), [e], ) @@ -2798,7 +2854,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `path` field is not valid because:", + "the 'path' field is not valid because:", SourceLine(_doc, "path", str), [e], ) @@ -2816,7 +2872,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `basename` field is not valid because:", + "the 'basename' field is not valid because:", SourceLine(_doc, "basename", str), [e], ) @@ -2834,7 +2890,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `listing` field is not valid because:", + "the 'listing' field is not valid because:", SourceLine(_doc, "listing", str), [e], ) @@ -2878,12 +2934,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -2918,23 +2976,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2943,11 +3004,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) r["location"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2956,11 +3018,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, 
base_url, False, None, relative_uris) r["path"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2969,12 +3032,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.basename is not None and "basename" not in r: r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2983,12 +3051,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.listing is not None and "listing" not in r: r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2997,6 +3070,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3103,7 +3177,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -3130,7 +3204,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -3147,7 +3221,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", 
SourceLine(_doc, "type", str), [e], ) @@ -3163,7 +3237,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -3181,7 +3255,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -3227,12 +3301,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -3265,23 +3341,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3290,11 +3369,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3303,6 +3383,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -3310,8 +3391,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3320,6 +3402,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3327,8 +3410,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3337,6 +3421,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -3344,8 +3429,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3354,6 +3440,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3361,8 +3448,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3371,6 +3459,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3445,7 +3534,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 
'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -3472,7 +3561,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -3489,7 +3578,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -3505,7 +3594,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -3550,12 +3639,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -3588,23 +3679,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3613,11 +3707,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = 
save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3626,6 +3721,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -3633,8 +3729,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3643,6 +3740,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3650,8 +3748,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3660,6 +3759,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3667,8 +3767,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3677,6 +3778,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3754,7 +3856,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -3780,7 +3882,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -3795,7 +3897,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -3811,7 +3913,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -3829,7 +3931,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -3875,12 +3977,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -3913,23 +4017,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3938,11 +4045,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3951,13 +4059,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3966,6 +4075,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3973,8 +4083,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3983,6 +4094,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3990,8 +4102,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4000,6 +4113,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -4007,8 +4121,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ 
-4017,6 +4132,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4090,7 +4206,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -4105,7 +4221,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4121,7 +4237,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -4139,7 +4255,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -4183,12 +4299,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -4221,23 +4339,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = 
saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4246,12 +4367,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.items is not None and "items" not in r: r["items"] = save( - self.items, top=False, base_url=base_url, relative_uris=relative_uris + self.items, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4260,12 +4386,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4274,12 +4405,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris + self.label, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4288,6 +4424,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -4295,8 +4432,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, 
inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4305,6 +4443,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4379,7 +4518,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -4406,7 +4545,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -4423,7 +4562,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4439,7 +4578,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -4484,12 +4623,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -4522,23 +4663,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if 
len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4547,11 +4691,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4560,6 +4705,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4567,8 +4713,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4577,6 +4724,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4584,8 +4732,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4594,6 +4743,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -4601,8 +4751,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4611,6 +4762,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4682,7 +4834,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -4699,7 +4851,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4715,7 +4867,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -4758,12 +4910,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -4796,23 +4950,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4821,12 +4978,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.fields is not None and "fields" not in r: r["fields"] = save( - 
self.fields, top=False, base_url=base_url, relative_uris=relative_uris + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4835,12 +4997,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4849,12 +5016,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris + self.label, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4863,6 +5035,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4936,7 +5109,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -4951,7 +5124,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4967,7 +5140,7 @@ def fromDoc( except ValidationException as e: 
_errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -4985,7 +5158,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -5029,12 +5202,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -5067,23 +5242,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5092,11 +5270,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5105,12 +5284,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = 
save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5119,12 +5303,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris + self.label, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5133,6 +5322,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -5140,8 +5330,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5150,6 +5341,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -5223,7 +5415,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -5238,7 +5430,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -5254,7 +5446,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the 
`label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -5272,7 +5464,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -5316,12 +5508,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -5354,23 +5548,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5379,12 +5576,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.items is not None and "items" not in r: r["items"] = save( - self.items, top=False, base_url=base_url, relative_uris=relative_uris + self.items, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5393,12 +5595,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5407,12 +5614,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris + self.label, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5421,6 +5633,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -5428,8 +5641,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5438,6 +5652,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -5539,7 +5754,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -5566,7 +5781,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -5584,7 +5799,7 @@ 
def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -5602,7 +5817,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -5620,7 +5835,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -5638,7 +5853,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -5656,7 +5871,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -5674,7 +5889,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -5692,7 +5907,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -5742,12 +5957,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = 
copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -5764,22 +5981,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -5804,23 +6016,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5829,11 +6044,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5842,6 +6058,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5849,8 +6066,9 @@ def save( top=False, 
base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5859,6 +6077,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -5866,8 +6085,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5876,6 +6096,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -5883,8 +6104,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5893,12 +6115,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5907,11 +6134,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5920,6 
+6148,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -5927,8 +6156,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5937,6 +6167,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -5944,8 +6175,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5954,12 +6186,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5968,6 +6205,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6073,7 +6311,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -6100,7 +6338,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), 
[e], ) @@ -6118,7 +6356,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -6136,7 +6374,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -6154,7 +6392,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -6172,7 +6410,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -6190,7 +6428,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -6238,12 +6476,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6260,22 +6500,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - 
doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -6300,23 +6535,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6325,11 +6563,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6338,6 +6577,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -6345,8 +6585,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6355,6 +6596,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -6362,8 +6604,9 @@ 
def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6372,6 +6615,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -6379,8 +6623,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6389,12 +6634,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6403,6 +6653,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -6410,8 +6661,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6420,11 +6672,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -6433,6 +6686,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6547,7 +6801,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", + "the 'expressionLib' field is not valid because:", SourceLine(_doc, "expressionLib", str), [e], ) @@ -6590,12 +6844,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6630,23 +6886,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6655,6 +6914,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6662,8 +6922,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6672,6 +6933,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6750,7 +7012,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `types` field is not valid because:", + "the 'types' field is not valid because:", SourceLine(_doc, "types", str), [e], ) @@ -6789,12 +7051,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6829,23 +7093,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6854,12 +7121,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.types is not None and "types" not in r: r["types"] = save( - self.types, top=False, base_url=base_url, relative_uris=relative_uris + self.types, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6868,6 +7140,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6941,7 +7214,7 
@@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envName` field is not valid because:", + "the 'envName' field is not valid because:", SourceLine(_doc, "envName", str), [e], ) @@ -6956,7 +7229,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envValue` field is not valid because:", + "the 'envValue' field is not valid because:", SourceLine(_doc, "envValue", str), [e], ) @@ -6996,12 +7269,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7034,23 +7309,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7059,12 +7337,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.envName is not None and "envName" not in r: r["envName"] = save( - self.envName, top=False, base_url=base_url, relative_uris=relative_uris + self.envName, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ 
-7073,12 +7356,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.envValue is not None and "envValue" not in r: r["envValue"] = save( - self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + self.envValue, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7087,6 +7375,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7219,7 +7508,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -7237,7 +7526,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `position` field is not valid because:", + "the 'position' field is not valid because:", SourceLine(_doc, "position", str), [e], ) @@ -7255,7 +7544,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `prefix` field is not valid because:", + "the 'prefix' field is not valid because:", SourceLine(_doc, "prefix", str), [e], ) @@ -7273,7 +7562,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `separate` field is not valid because:", + "the 'separate' field is not valid because:", SourceLine(_doc, "separate", str), [e], ) @@ -7291,7 +7580,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `itemSeparator` field is not valid because:", + "the 'itemSeparator' field is not valid because:", SourceLine(_doc, "itemSeparator", str), [e], ) @@ -7309,7 +7598,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `valueFrom` field is not valid because:", + "the 'valueFrom' field is not valid because:", SourceLine(_doc, "valueFrom", str), [e], ) @@ -7327,7 +7616,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `shellQuote` field is not valid because:", + "the 'shellQuote' field is not valid because:", SourceLine(_doc, "shellQuote", str), [e], ) @@ -7374,12 +7663,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7412,23 +7703,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7437,6 +7731,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -7444,8 +7739,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7454,12 +7750,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.position is not None and "position" not in r: r["position"] = save( - self.position, top=False, base_url=base_url, relative_uris=relative_uris + self.position, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7468,12 +7769,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.prefix is not None and "prefix" not in r: r["prefix"] = save( - self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + self.prefix, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7482,12 +7788,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.separate is not None and "separate" not in r: r["separate"] = save( - self.separate, top=False, base_url=base_url, relative_uris=relative_uris + self.separate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7496,6 +7807,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.itemSeparator is not None and "itemSeparator" not in r: r["itemSeparator"] = save( @@ -7503,8 +7815,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7513,6 +7826,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -7520,8 +7834,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7530,6 +7845,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.shellQuote is not None and "shellQuote" not in r: r["shellQuote"] = save( @@ -7537,8 +7853,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7547,6 +7864,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7642,7 +7960,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `glob` field is not valid because:", + "the 'glob' field is not valid because:", SourceLine(_doc, "glob", str), [e], ) @@ -7660,7 +7978,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -7678,7 +7996,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputEval` field is not valid because:", + "the 'outputEval' field is not valid because:", SourceLine(_doc, "outputEval", str), [e], ) @@ -7721,12 +8039,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + 
inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7759,23 +8079,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7784,12 +8107,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.glob is not None and "glob" not in r: r["glob"] = save( - self.glob, top=False, base_url=base_url, relative_uris=relative_uris + self.glob, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7798,6 +8126,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -7805,8 +8134,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7815,6 +8145,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputEval is not None and "outputEval" not in r: r["outputEval"] = save( @@ -7822,8 +8153,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7832,6 +8164,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7909,7 +8242,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -7936,7 +8269,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -7953,7 +8286,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -7969,7 +8302,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -7987,7 +8320,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -8035,12 +8368,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8073,23 +8408,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if 
key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8098,11 +8436,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8111,6 +8450,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -8118,8 +8458,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8128,6 +8469,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -8135,8 +8477,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8145,6 +8488,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8152,8 +8496,9 @@ def save( top=False, 
base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8162,6 +8507,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -8169,8 +8515,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8179,6 +8526,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -8253,7 +8601,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -8280,7 +8628,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -8297,7 +8645,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -8313,7 +8661,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -8360,12 +8708,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) 
keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8398,23 +8748,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8423,11 +8776,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8436,6 +8790,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -8443,8 +8798,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8453,6 +8809,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -8460,8 +8817,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -8470,6 +8828,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -8477,8 +8836,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8487,6 +8847,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -8564,7 +8925,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -8590,7 +8951,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -8605,7 +8966,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -8621,7 +8982,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -8639,7 +9000,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -8687,12 +9048,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: 
if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8725,23 +9088,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8750,11 +9116,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8763,13 +9130,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8778,6 +9146,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -8785,8 +9154,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -8795,6 +9165,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -8802,8 +9173,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8812,6 +9184,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8819,8 +9192,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8829,6 +9203,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -8902,7 +9277,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -8917,7 +9292,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -8933,7 +9308,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -8951,7 +9326,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", 
SourceLine(_doc, "inputBinding", str), [e], ) @@ -8997,12 +9372,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9035,23 +9412,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9060,12 +9440,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.items is not None and "items" not in r: r["items"] = save( - self.items, top=False, base_url=base_url, relative_uris=relative_uris + self.items, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9074,12 +9459,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( 
old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9088,12 +9478,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris + self.label, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9102,6 +9497,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9109,8 +9505,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9119,6 +9516,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -9193,7 +9591,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -9220,7 +9618,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -9237,7 +9635,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -9253,7 +9651,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid 
because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -9300,12 +9698,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9338,23 +9738,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9363,11 +9766,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9376,6 +9780,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9383,8 +9788,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9393,6 +9799,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, 
+ inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9400,8 +9807,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9410,6 +9818,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -9417,8 +9826,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9427,6 +9837,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -9501,7 +9912,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -9528,7 +9939,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -9545,7 +9956,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -9561,7 +9972,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -9608,12 +10019,14 @@ def save( base_url: str = "", relative_uris: bool = True, 
keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9646,23 +10059,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9671,11 +10087,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9684,6 +10101,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -9691,8 +10109,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9701,6 +10120,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9708,8 +10128,9 @@ def save( top=False, base_url=str(self.name), 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9718,6 +10139,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9725,8 +10147,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9735,6 +10158,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -9808,7 +10232,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -9823,7 +10247,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -9839,7 +10263,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -9857,7 +10281,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -9903,12 +10327,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = 
copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9941,23 +10367,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9966,11 +10395,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9979,12 +10409,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9993,12 +10428,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris + self.label, + top=False, + base_url=base_url, + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10007,6 +10447,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -10014,8 +10455,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10024,6 +10466,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -10097,7 +10540,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -10112,7 +10555,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -10128,7 +10571,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -10146,7 +10589,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -10192,12 +10635,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) 
keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -10230,23 +10675,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10255,12 +10703,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.items is not None and "items" not in r: r["items"] = save( - self.items, top=False, base_url=base_url, relative_uris=relative_uris + self.items, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10269,12 +10722,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10283,12 +10741,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( - self.label, top=False, base_url=base_url, 
relative_uris=relative_uris + self.label, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10297,6 +10760,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -10304,8 +10768,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10314,6 +10779,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -10419,7 +10885,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -10446,7 +10912,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -10464,7 +10930,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -10482,7 +10948,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -10500,7 +10966,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not 
valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -10518,7 +10984,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -10536,7 +11002,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -10554,7 +11020,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -10572,7 +11038,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -10622,12 +11088,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -10644,22 +11112,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", 
[doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -10684,23 +11147,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10709,11 +11175,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10722,6 +11189,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10729,8 +11197,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10739,6 +11208,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -10746,8 +11216,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = 
add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10756,6 +11227,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -10763,8 +11235,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10773,12 +11246,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10787,11 +11265,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10800,6 +11279,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -10807,8 +11287,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10817,6 +11298,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and 
"default" not in r: r["default"] = save( @@ -10824,8 +11306,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10834,12 +11317,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10848,6 +11336,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -10961,7 +11450,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -10988,7 +11477,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -11006,7 +11495,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -11024,7 +11513,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -11042,7 +11531,7 @@ def fromDoc( except ValidationException 
as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -11060,7 +11549,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -11078,7 +11567,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -11096,7 +11585,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -11147,12 +11636,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -11169,22 +11660,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -11209,23 +11695,26 @@ 
def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11234,11 +11723,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11247,6 +11737,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11254,8 +11745,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11264,6 +11756,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -11271,8 +11764,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11281,6 +11775,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -11288,8 +11783,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11298,12 +11794,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11312,6 +11813,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -11319,8 +11821,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11329,11 +11832,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11342,12 +11846,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + 
base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11356,6 +11865,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -11508,7 +12018,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -11534,7 +12044,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -11549,7 +12059,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -11565,7 +12075,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -11583,7 +12093,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -11601,7 +12111,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -11619,7 +12129,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid 
because:", SourceLine(_doc, "doc", str), [e], ) @@ -11637,7 +12147,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -11655,7 +12165,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `baseCommand` field is not valid because:", + "the 'baseCommand' field is not valid because:", SourceLine(_doc, "baseCommand", str), [e], ) @@ -11673,7 +12183,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `arguments` field is not valid because:", + "the 'arguments' field is not valid because:", SourceLine(_doc, "arguments", str), [e], ) @@ -11691,7 +12201,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stdin` field is not valid because:", + "the 'stdin' field is not valid because:", SourceLine(_doc, "stdin", str), [e], ) @@ -11709,7 +12219,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stderr` field is not valid because:", + "the 'stderr' field is not valid because:", SourceLine(_doc, "stderr", str), [e], ) @@ -11727,7 +12237,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stdout` field is not valid because:", + "the 'stdout' field is not valid because:", SourceLine(_doc, "stdout", str), [e], ) @@ -11745,7 +12255,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `successCodes` field is not valid because:", + "the 'successCodes' field is not valid because:", SourceLine(_doc, "successCodes", str), [e], ) @@ -11763,7 +12273,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `temporaryFailCodes` field is not valid because:", + "the 'temporaryFailCodes' field is not valid 
because:", SourceLine(_doc, "temporaryFailCodes", str), [e], ) @@ -11781,7 +12291,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `permanentFailCodes` field is not valid because:", + "the 'permanentFailCodes' field is not valid because:", SourceLine(_doc, "permanentFailCodes", str), [e], ) @@ -11838,12 +12348,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -11860,22 +12372,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -11902,23 +12409,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = 
add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11927,11 +12437,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11940,6 +12451,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -11947,8 +12459,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11957,6 +12470,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -11964,8 +12478,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11974,6 +12489,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -11981,8 +12497,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11991,6 +12508,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -11998,8 +12516,9 @@ def save( top=False, 
base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12008,6 +12527,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -12015,8 +12535,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12025,12 +12546,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12039,13 +12565,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12054,6 +12581,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.baseCommand is not None and "baseCommand" not in r: r["baseCommand"] = save( @@ -12061,8 +12589,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ 
-12071,6 +12600,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.arguments is not None and "arguments" not in r: r["arguments"] = save( @@ -12078,8 +12608,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12088,6 +12619,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stdin is not None and "stdin" not in r: r["stdin"] = save( @@ -12095,8 +12627,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12105,6 +12638,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stderr is not None and "stderr" not in r: r["stderr"] = save( @@ -12112,8 +12646,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12122,6 +12657,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stdout is not None and "stdout" not in r: r["stdout"] = save( @@ -12129,8 +12665,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12139,6 +12676,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.successCodes is not None and "successCodes" not in r: r["successCodes"] = save( @@ -12146,8 
+12684,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12156,6 +12695,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: r["temporaryFailCodes"] = save( @@ -12163,8 +12703,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12173,6 +12714,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.permanentFailCodes is not None and "permanentFailCodes" not in r: r["permanentFailCodes"] = save( @@ -12180,8 +12722,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12190,6 +12733,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -12343,7 +12887,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerPull` field is not valid because:", + "the 'dockerPull' field is not valid because:", SourceLine(_doc, "dockerPull", str), [e], ) @@ -12361,7 +12905,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerLoad` field is not valid because:", + "the 'dockerLoad' field is not valid because:", SourceLine(_doc, "dockerLoad", str), [e], ) @@ -12379,7 +12923,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerFile` field is 
not valid because:", + "the 'dockerFile' field is not valid because:", SourceLine(_doc, "dockerFile", str), [e], ) @@ -12397,7 +12941,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerImport` field is not valid because:", + "the 'dockerImport' field is not valid because:", SourceLine(_doc, "dockerImport", str), [e], ) @@ -12415,7 +12959,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerImageId` field is not valid because:", + "the 'dockerImageId' field is not valid because:", SourceLine(_doc, "dockerImageId", str), [e], ) @@ -12433,7 +12977,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerOutputDirectory` field is not valid because:", + "the 'dockerOutputDirectory' field is not valid because:", SourceLine(_doc, "dockerOutputDirectory", str), [e], ) @@ -12479,12 +13023,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -12519,23 +13065,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12544,6 +13093,7 @@ def save( cols=cols, 
min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -12551,8 +13101,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12561,6 +13112,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerLoad is not None and "dockerLoad" not in r: r["dockerLoad"] = save( @@ -12568,8 +13120,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12578,6 +13131,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerFile is not None and "dockerFile" not in r: r["dockerFile"] = save( @@ -12585,8 +13139,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12595,6 +13150,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerImport is not None and "dockerImport" not in r: r["dockerImport"] = save( @@ -12602,8 +13158,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12612,6 +13169,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerImageId is not None and "dockerImageId" not in r: r["dockerImageId"] = save( @@ -12619,8 +13177,9 
@@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12629,6 +13188,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: r["dockerOutputDirectory"] = save( @@ -12636,8 +13196,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12646,6 +13207,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -12729,7 +13291,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `packages` field is not valid because:", + "the 'packages' field is not valid because:", SourceLine(_doc, "packages", str), [e], ) @@ -12768,12 +13330,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -12808,23 +13372,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = 
saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12833,12 +13400,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.packages is not None and "packages" not in r: r["packages"] = save( - self.packages, top=False, base_url=base_url, relative_uris=relative_uris + self.packages, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12847,6 +13419,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -12917,7 +13490,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `package` field is not valid because:", + "the 'package' field is not valid because:", SourceLine(_doc, "package", str), [e], ) @@ -12933,7 +13506,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `version` field is not valid because:", + "the 'version' field is not valid because:", SourceLine(_doc, "version", str), [e], ) @@ -12951,7 +13524,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `specs` field is not valid because:", + "the 'specs' field is not valid because:", SourceLine(_doc, "specs", str), [e], ) @@ -12994,12 +13567,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -13032,23 +13607,26 @@ def save( if isinstance(key, str): if hasattr(self, 
key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13057,12 +13635,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.package is not None and "package" not in r: r["package"] = save( - self.package, top=False, base_url=base_url, relative_uris=relative_uris + self.package, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13071,12 +13654,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.version is not None and "version" not in r: r["version"] = save( - self.version, top=False, base_url=base_url, relative_uris=relative_uris + self.version, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13085,11 +13673,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.specs is not None and "specs" not in r: u = save_relative_uri(self.specs, base_url, False, None, relative_uris) r["specs"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13098,6 +13687,7 @@ def save( cols=cols, 
min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -13177,7 +13767,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `entryname` field is not valid because:", + "the 'entryname' field is not valid because:", SourceLine(_doc, "entryname", str), [e], ) @@ -13194,7 +13784,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `entry` field is not valid because:", + "the 'entry' field is not valid because:", SourceLine(_doc, "entry", str), [e], ) @@ -13210,7 +13800,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `writable` field is not valid because:", + "the 'writable' field is not valid because:", SourceLine(_doc, "writable", str), [e], ) @@ -13253,12 +13843,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -13291,23 +13883,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13316,6 +13911,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if 
self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -13323,8 +13919,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13333,12 +13930,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.entry is not None and "entry" not in r: r["entry"] = save( - self.entry, top=False, base_url=base_url, relative_uris=relative_uris + self.entry, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13347,12 +13949,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.writable is not None and "writable" not in r: r["writable"] = save( - self.writable, top=False, base_url=base_url, relative_uris=relative_uris + self.writable, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13361,6 +13968,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -13432,7 +14040,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `listing` field is not valid because:", + "the 'listing' field is not valid because:", SourceLine(_doc, "listing", str), [e], ) @@ -13473,12 +14081,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = 
copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -13513,23 +14123,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13538,12 +14151,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.listing is not None and "listing" not in r: r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13552,6 +14170,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -13625,7 +14244,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envDef` field is not valid because:", + "the 'envDef' field is not valid because:", SourceLine(_doc, "envDef", str), [e], ) @@ -13664,12 +14283,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for 
key in keys: if isinstance(doc, CommentedMap): @@ -13704,23 +14325,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13729,12 +14353,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( - self.envDef, top=False, base_url=base_url, relative_uris=relative_uris + self.envDef, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13743,6 +14372,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -13842,12 +14472,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -13882,23 +14514,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + 
[key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13907,6 +14542,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -14035,7 +14671,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `coresMin` field is not valid because:", + "the 'coresMin' field is not valid because:", SourceLine(_doc, "coresMin", str), [e], ) @@ -14053,7 +14689,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `coresMax` field is not valid because:", + "the 'coresMax' field is not valid because:", SourceLine(_doc, "coresMax", str), [e], ) @@ -14071,7 +14707,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `ramMin` field is not valid because:", + "the 'ramMin' field is not valid because:", SourceLine(_doc, "ramMin", str), [e], ) @@ -14089,7 +14725,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `ramMax` field is not valid because:", + "the 'ramMax' field is not valid because:", SourceLine(_doc, "ramMax", str), [e], ) @@ -14107,7 +14743,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `tmpdirMin` field is not valid because:", + "the 'tmpdirMin' field is not valid because:", SourceLine(_doc, "tmpdirMin", str), [e], ) @@ -14125,7 +14761,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `tmpdirMax` field is not valid because:", + "the 'tmpdirMax' field is not valid because:", SourceLine(_doc, "tmpdirMax", str), [e], 
) @@ -14143,7 +14779,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outdirMin` field is not valid because:", + "the 'outdirMin' field is not valid because:", SourceLine(_doc, "outdirMin", str), [e], ) @@ -14161,7 +14797,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outdirMax` field is not valid because:", + "the 'outdirMax' field is not valid because:", SourceLine(_doc, "outdirMax", str), [e], ) @@ -14209,12 +14845,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14249,23 +14887,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14274,12 +14915,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( - self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + self.coresMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -14288,12 +14934,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.coresMax is not None and "coresMax" not in r: r["coresMax"] = save( - self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris + self.coresMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14302,12 +14953,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.ramMin is not None and "ramMin" not in r: r["ramMin"] = save( - self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + self.ramMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14316,12 +14972,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.ramMax is not None and "ramMax" not in r: r["ramMax"] = save( - self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + self.ramMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14330,6 +14991,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.tmpdirMin is not None and "tmpdirMin" not in r: r["tmpdirMin"] = save( @@ -14337,8 +14999,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, 
@@ -14347,6 +15010,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.tmpdirMax is not None and "tmpdirMax" not in r: r["tmpdirMax"] = save( @@ -14354,8 +15018,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14364,6 +15029,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outdirMin is not None and "outdirMin" not in r: r["outdirMin"] = save( @@ -14371,8 +15037,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14381,6 +15048,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outdirMax is not None and "outdirMax" not in r: r["outdirMax"] = save( @@ -14388,8 +15056,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14398,6 +15067,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14507,7 +15177,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -14534,7 +15204,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -14552,7 +15222,7 @@ def 
fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -14570,7 +15240,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -14588,7 +15258,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -14606,7 +15276,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -14624,7 +15294,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -14642,7 +15312,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -14693,12 +15363,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14715,22 +15387,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = 
self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -14755,23 +15422,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14780,11 +15450,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14793,6 +15464,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -14800,8 +15472,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14810,6 +15483,7 @@ 
def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -14817,8 +15491,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14827,6 +15502,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -14834,8 +15510,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14844,12 +15521,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14858,6 +15540,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -14865,8 +15548,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14875,11 +15559,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is 
not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14888,12 +15573,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14902,6 +15592,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -15026,7 +15717,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -15052,7 +15743,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -15067,7 +15758,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -15083,7 +15774,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -15101,7 +15792,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is 
not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -15119,7 +15810,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -15137,7 +15828,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -15155,7 +15846,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -15172,7 +15863,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `expression` field is not valid because:", + "the 'expression' field is not valid because:", SourceLine(_doc, "expression", str), [e], ) @@ -15220,12 +15911,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15242,22 +15935,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", 
[doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -15284,23 +15972,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15309,11 +16000,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15322,6 +16014,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -15329,8 +16022,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15339,6 +16033,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -15346,8 +16041,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( 
old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15356,6 +16052,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -15363,8 +16060,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15373,6 +16071,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -15380,8 +16079,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15390,6 +16090,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -15397,8 +16098,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15407,12 +16109,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15421,13 +16128,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15436,6 +16144,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.expression is not None and "expression" not in r: r["expression"] = save( @@ -15443,8 +16152,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15453,6 +16163,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -15578,7 +16289,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -15605,7 +16316,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -15623,7 +16334,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -15641,7 +16352,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -15659,7 +16370,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -15677,7 +16388,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -15695,7 +16406,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -15713,7 +16424,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputSource` field is not valid because:", + "the 'outputSource' field is not valid because:", SourceLine(_doc, "outputSource", str), [e], ) @@ -15731,7 +16442,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", + "the 'linkMerge' field is not valid because:", SourceLine(_doc, "linkMerge", str), [e], ) @@ -15749,7 +16460,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -15802,12 +16513,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15824,22 +16537,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = 
doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -15864,23 +16572,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15889,11 +16600,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15902,6 +16614,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -15909,8 +16622,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15919,6 +16633,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -15926,8 +16641,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15936,6 +16652,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -15943,8 +16660,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15953,12 +16671,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15967,6 +16690,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -15974,8 +16698,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15984,11 +16709,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = 
save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15997,13 +16723,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputSource is not None and "outputSource" not in r: u = save_relative_uri( self.outputSource, str(self.id), False, 1, relative_uris ) r["outputSource"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16012,6 +16739,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -16019,8 +16747,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16029,12 +16758,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16043,6 +16777,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16182,7 +16917,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -16209,7 +16944,7 @@ def fromDoc( except 
ValidationException as e: _errors__.append( ValidationException( - "the `source` field is not valid because:", + "the 'source' field is not valid because:", SourceLine(_doc, "source", str), [e], ) @@ -16227,7 +16962,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", + "the 'linkMerge' field is not valid because:", SourceLine(_doc, "linkMerge", str), [e], ) @@ -16245,7 +16980,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -16263,7 +16998,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", + "the 'valueFrom' field is not valid because:", SourceLine(_doc, "valueFrom", str), [e], ) @@ -16309,12 +17044,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16331,22 +17068,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not 
None: @@ -16371,23 +17103,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16396,11 +17131,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16409,11 +17145,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.source is not None and "source" not in r: u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) r["source"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16422,6 +17159,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -16429,8 +17167,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16439,6 +17178,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" 
not in r: r["default"] = save( @@ -16446,8 +17186,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16456,6 +17197,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -16463,8 +17205,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16473,6 +17216,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16544,7 +17288,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -16593,12 +17337,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16615,22 +17361,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = 
temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -16655,23 +17396,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16680,11 +17424,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16693,6 +17438,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16857,7 +17603,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -16883,7 +17629,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `in` field is not valid because:", + "the 'in' field is not valid because:", SourceLine(_doc, "in", str), [e], ) @@ -16898,7 +17644,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `out` field is not valid because:", + "the 'out' field is not valid 
because:", SourceLine(_doc, "out", str), [e], ) @@ -16914,7 +17660,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -16932,7 +17678,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -16950,7 +17696,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -16968,7 +17714,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -16985,7 +17731,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `run` field is not valid because:", + "the 'run' field is not valid because:", SourceLine(_doc, "run", str), [e], ) @@ -17001,7 +17747,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `scatter` field is not valid because:", + "the 'scatter' field is not valid because:", SourceLine(_doc, "scatter", str), [e], ) @@ -17019,7 +17765,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `scatterMethod` field is not valid because:", + "the 'scatterMethod' field is not valid because:", SourceLine(_doc, "scatterMethod", str), [e], ) @@ -17070,12 +17816,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = 
copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -17092,22 +17840,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -17132,23 +17875,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17157,11 +17903,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17170,12 +17917,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.in_ is not None and "in" not 
in r: r["in"] = save( - self.in_, top=False, base_url=str(self.id), relative_uris=relative_uris + self.in_, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17184,11 +17936,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.out is not None and "out" not in r: u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) r["out"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17197,6 +17950,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -17204,8 +17958,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17214,6 +17969,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -17221,8 +17977,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17231,6 +17988,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -17238,8 +17996,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -17248,12 +18007,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17262,11 +18026,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.run is not None and "run" not in r: u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) r["run"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17275,11 +18040,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.scatter is not None and "scatter" not in r: u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) r["scatter"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17288,13 +18054,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.scatterMethod is not None and "scatterMethod" not in r: u = save_relative_uri( self.scatterMethod, str(self.id), False, None, relative_uris ) r["scatterMethod"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17303,6 +18070,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -17473,7 +18241,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + 
"the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -17499,7 +18267,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -17514,7 +18282,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -17530,7 +18298,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -17548,7 +18316,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -17566,7 +18334,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -17584,7 +18352,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -17602,7 +18370,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -17619,7 +18387,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `steps` field is not valid because:", + "the 'steps' field is not valid because:", SourceLine(_doc, 
"steps", str), [e], ) @@ -17667,12 +18435,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -17689,22 +18459,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -17731,23 +18496,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17756,11 +18524,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( 
+ max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17769,6 +18538,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -17776,8 +18546,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17786,6 +18557,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -17793,8 +18565,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17803,6 +18576,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -17810,8 +18584,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17820,6 +18595,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -17827,8 +18603,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17837,6 +18614,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -17844,8 +18622,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17854,12 +18633,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17868,13 +18652,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17883,6 +18668,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.steps is not None and "steps" not in r: r["steps"] = save( @@ -17890,8 +18676,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17900,6 +18687,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -18007,12 +18795,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, 
int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -18047,23 +18837,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18072,6 +18865,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -18166,12 +18960,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -18206,23 +19002,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( 
old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18231,6 +19030,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -18325,12 +19125,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -18365,23 +19167,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18390,6 +19195,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -18484,12 +19290,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -18524,23 +19332,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, 
relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18549,6 +19360,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -18779,7 +19591,23 @@ def save( ), "PrimitiveType", ) +""" +Salad data types are based on Avro schema declarations. Refer to the +[Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for +detailed information. + +null: no value +boolean: a binary value +int: 32-bit signed integer +long: 64-bit signed integer +float: single precision (32-bit) IEEE 754 floating-point number +double: double precision (64-bit) IEEE 754 floating-point number +string: Unicode character sequence +""" AnyLoader = _EnumLoader(("Any",), "Any") +""" +The **Any** type validates for any non-null value. +""" RecordFieldLoader = _RecordLoader(RecordField) RecordSchemaLoader = _RecordLoader(RecordSchema) EnumSchemaLoader = _RecordLoader(EnumSchema) @@ -18801,6 +19629,9 @@ def save( ), "CWLVersion", ) +""" +Version symbols for published CWL document versions. +""" CWLTypeLoader = _EnumLoader( ( "null", @@ -18815,6 +19646,11 @@ def save( ), "CWLType", ) +""" +Extends primitive types with the concept of a file and directory as a builtin type. 
+File: A File object +Directory: A Directory object +""" FileLoader = _RecordLoader(File) DirectoryLoader = _RecordLoader(Directory) ExpressionLoader = _ExpressionLoader(str) @@ -18844,7 +19680,93 @@ def save( CommandInputParameterLoader = _RecordLoader(CommandInputParameter) CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) stdoutLoader = _EnumLoader(("stdout",), "stdout") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stdout + +stdout: a_stdout_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + +stdout: a_stdout_file +``` + +If there is no `stdout` name provided, a random filename will be created. +For example, the following +``` +outputs: + an_output_name: + type: stdout +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + +stdout: random_stdout_filenameABCDEFG +``` +""" stderrLoader = _EnumLoader(("stderr",), "stderr") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stderr + +stderr: a_stderr_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + +stderr: a_stderr_file +``` + +If there is no `stderr` name provided, a random filename will be created. 
+For example, the following +``` +outputs: + an_output_name: + type: stderr +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + +stderr: random_stderr_filenameABCDEFG +``` +""" CommandLineToolLoader = _RecordLoader(CommandLineTool) DockerRequirementLoader = _RecordLoader(DockerRequirement) SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) @@ -18863,6 +19785,9 @@ def save( ), "LinkMergeMethod", ) +""" +The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +""" WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) @@ -18874,6 +19799,9 @@ def save( ), "ScatterMethod", ) +""" +The scatter method, as described in [workflow step scatter](#WorkflowStep). +""" WorkflowStepLoader = _RecordLoader(WorkflowStep) WorkflowLoader = _RecordLoader(Workflow) SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) diff --git a/schema_salad/tests/cwl_v1_1.py b/schema_salad/tests/cwl_v1_1.py index c3ad579d5..2d851808e 100644 --- a/schema_salad/tests/cwl_v1_1.py +++ b/schema_salad/tests/cwl_v1_1.py @@ -45,10 +45,7 @@ IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] - doc_line_info = CommentedMap() -inserted_line_info: Dict[int, int] = {} - class LoadingOptions: idx: IdxType @@ -138,9 +135,7 @@ def __init__( ) self.fetcher: Fetcher = DefaultFetcher({}, session) - self.cache = ( - self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - ) + self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} self.vocab = _vocab self.rvocab = _rvocab @@ -172,9 +167,7 @@ def graph(self) -> Graph: try: content = self.fetcher.fetch_text(fetchurl) except Exception as e: - _logger.warning( - "Could not load extension schema %s: %s", fetchurl, 
str(e) - ) + _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) continue newGraph = Graph() err_msg = "unknown error" @@ -187,9 +180,7 @@ def graph(self) -> Graph: except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: err_msg = str(e) else: - _logger.warning( - "Could not load extension schema %s: %s", fetchurl, err_msg - ) + _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) self.cache[key] = graph return graph @@ -233,20 +224,16 @@ def load_field(val, fieldtype, baseuri, loadingOptions): ) loadingOptions.imports.append(url) return result - elif "$include" in val: + if "$include" in val: if loadingOptions.fileuri is None: raise SchemaSaladException("Cannot load $import without fileuri") - url = loadingOptions.fetcher.urljoin( - loadingOptions.fileuri, val["$include"] - ) + url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) val = loadingOptions.fetcher.fetch_text(url) loadingOptions.includes.append(url) return fieldtype.load(val, baseuri, loadingOptions) -save_type = Optional[ - Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] -] +save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] def add_kv( @@ -258,6 +245,7 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, + inserted_line_info: Dict[int, int] = {} ) -> int: """Add key value pair into Commented Map. @@ -402,6 +390,7 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> save_type: """Save a val of any type. 
@@ -425,7 +414,7 @@ def save( if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -443,17 +432,16 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info ) ) return r - # return [ - # save(v, top=False, base_url=base_url, relative_uris=relative_uris) - # for v in val - # ] + if isinstance(val, MutableMapping): newdict = CommentedMap() new_keys = keys for key in val: + if doc: if key in doc: newdict.lc.add_kv_line_col(key, doc.lc.data[key]) @@ -465,14 +453,10 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info, ) + return newdict - # newdict = {} - # for key in val: - # newdict[key] = save( - # val[key], top=False, base_url=base_url, relative_uris=relative_uris - # ) - # return newdict if val is None or isinstance(val, (int, float, bool, str)): return val raise Exception("Not Saveable: %s" % type(val)) @@ -529,10 +513,7 @@ def expand_url( split = urlsplit(url) if ( - ( - bool(split.scheme) - and split.scheme in loadingOptions.fetcher.supported_schemes() - ) + (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) or url.startswith("$(") or url.startswith("${") ): @@ -572,7 +553,7 @@ def expand_url( if url in loadingOptions.rvocab: return loadingOptions.rvocab[url] else: - raise ValidationException(f"Term '{url}' not in vocabulary") + raise ValidationException(f"Term {url!r} not in vocabulary") return url @@ -623,9 +604,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): errors = [] # type: List[SchemaSaladException] for i in range(0, len(doc)): try: - lf = load_field( - doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions - ) + lf = load_field(doc[i], 
_UnionLoader((self, self.items)), baseuri, loadingOptions) if isinstance(lf, MutableSequence): r.extend(lf) else: @@ -649,8 +628,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): # type: (Any, str, LoadingOptions, Optional[str]) -> Any if doc in self.symbols: return doc - else: - raise ValidationException(f"Expected one of {self.symbols}") + raise ValidationException(f"Expected one of {self.symbols}") def __repr__(self): # type: () -> str return self.name @@ -678,9 +656,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): new_dict["pattern"] = dict_copy.pop("pattern") else: raise ValidationException( - "Missing pattern in secondaryFiles specification entry: {}".format( - d - ) + f"Missing pattern in secondaryFiles specification entry: {d}" ) new_dict["required"] = ( dict_copy.pop("required") if "required" in dict_copy else None @@ -705,19 +681,13 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): new_dict["pattern"] = doc_copy.pop("pattern") else: raise ValidationException( - "Missing pattern in secondaryFiles specification entry: {}".format( - doc - ) + f"Missing pattern in secondaryFiles specification entry: {doc}" ) - new_dict["required"] = ( - doc_copy.pop("required") if "required" in doc_copy else None - ) + new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None if len(doc_copy): raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - doc_copy - ) + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" ) r.append(new_dict) @@ -833,9 +803,7 @@ def resolve( if m: group1 = m.group(1) assert group1 is not None # nosec - first = expand_url( - group1, baseuri, loadingOptions, False, True, self.refScope - ) + first = expand_url(group1, baseuri, loadingOptions, False, True, self.refScope) second = third = None if bool(m.group(2)): second = {"type": "array", "items": first} @@ -944,11 +912,6 @@ def _document_load( addl_metadata=addl_metadata, 
) - # doc = { - # k: v - # for k, v in doc.items() - # if k not in ("$namespaces", "$schemas", "$base") - # } doc = copy.copy(doc) if "$namespaces" in doc: doc.pop("$namespaces") @@ -1000,10 +963,7 @@ def _document_load_by_url( doc_url, frg = urldefrag(url) text = loadingOptions.fetcher.fetch_text(doc_url) - if isinstance(text, bytes): - textIO = StringIO(text.decode("utf-8")) - else: - textIO = StringIO(text) + textIO = StringIO(text) textIO.name = str(doc_url) yaml = yaml_no_ts() result = yaml.load(textIO) @@ -1034,8 +994,7 @@ def file_uri(path, split_frag=False): # type: (str, bool) -> str frag = "" if urlpath.startswith("//"): return f"file:{urlpath}{frag}" - else: - return f"file://{urlpath}{frag}" + return f"file://{urlpath}{frag}" def prefix_url(url: str, namespaces: Dict[str, str]) -> str: @@ -1055,10 +1014,7 @@ def save_relative_uri( ) -> Any: """Convert any URI to a relative one, obeying the scoping rules.""" if isinstance(uri, MutableSequence): - return [ - save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) - for u in uri - ] + return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] elif isinstance(uri, str): if not relative_uris or uri == base_url: return uri @@ -1082,8 +1038,7 @@ def save_relative_uri( if urisplit.fragment.startswith(basefrag): return urisplit.fragment[len(basefrag) :] - else: - return urisplit.fragment + return urisplit.fragment return uri else: return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) @@ -1171,7 +1126,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -1198,7 +1153,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -1215,7 
+1170,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1257,12 +1212,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1295,23 +1252,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1320,11 +1280,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1333,6 +1294,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -1340,8 +1302,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -1350,6 +1313,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1357,8 +1321,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1367,6 +1332,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1432,7 +1398,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -1449,7 +1415,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1489,12 +1455,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1527,23 +1495,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = 
saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1552,12 +1523,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.fields is not None and "fields" not in r: r["fields"] = save( - self.fields, top=False, base_url=base_url, relative_uris=relative_uris + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1566,12 +1542,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1580,6 +1561,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1649,7 +1631,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -1664,7 +1646,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1704,12 +1686,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = 
copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1742,23 +1726,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1767,11 +1754,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1780,12 +1768,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1794,6 +1787,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1858,7 +1852,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", 
str), [e], ) @@ -1873,7 +1867,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1913,12 +1907,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1951,23 +1947,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1976,12 +1975,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.items is not None and "items" not in r: r["items"] = save( - self.items, top=False, base_url=base_url, relative_uris=relative_uris + self.items, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1990,12 +1994,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, 
relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2004,6 +2013,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -2190,7 +2200,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `location` field is not valid because:", + "the 'location' field is not valid because:", SourceLine(_doc, "location", str), [e], ) @@ -2208,7 +2218,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `path` field is not valid because:", + "the 'path' field is not valid because:", SourceLine(_doc, "path", str), [e], ) @@ -2226,7 +2236,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `basename` field is not valid because:", + "the 'basename' field is not valid because:", SourceLine(_doc, "basename", str), [e], ) @@ -2244,7 +2254,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dirname` field is not valid because:", + "the 'dirname' field is not valid because:", SourceLine(_doc, "dirname", str), [e], ) @@ -2262,7 +2272,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `nameroot` field is not valid because:", + "the 'nameroot' field is not valid because:", SourceLine(_doc, "nameroot", str), [e], ) @@ -2280,7 +2290,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `nameext` field is not valid because:", + "the 'nameext' field is not valid because:", SourceLine(_doc, "nameext", str), [e], ) @@ -2298,7 +2308,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `checksum` field 
is not valid because:", + "the 'checksum' field is not valid because:", SourceLine(_doc, "checksum", str), [e], ) @@ -2316,7 +2326,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `size` field is not valid because:", + "the 'size' field is not valid because:", SourceLine(_doc, "size", str), [e], ) @@ -2334,7 +2344,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -2352,7 +2362,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -2370,7 +2380,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `contents` field is not valid because:", + "the 'contents' field is not valid because:", SourceLine(_doc, "contents", str), [e], ) @@ -2421,12 +2431,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -2461,23 +2473,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = 
saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2486,11 +2501,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) r["location"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2499,11 +2515,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) r["path"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2512,12 +2529,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.basename is not None and "basename" not in r: r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2526,12 +2548,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dirname is not None and "dirname" not in r: r["dirname"] = save( - self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + self.dirname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2540,12 +2567,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) 
if self.nameroot is not None and "nameroot" not in r: r["nameroot"] = save( - self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + self.nameroot, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2554,12 +2586,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.nameext is not None and "nameext" not in r: r["nameext"] = save( - self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + self.nameext, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2568,12 +2605,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.checksum is not None and "checksum" not in r: r["checksum"] = save( - self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + self.checksum, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2582,12 +2624,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.size is not None and "size" not in r: r["size"] = save( - self.size, top=False, base_url=base_url, relative_uris=relative_uris + self.size, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2596,6 +2643,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -2603,8 +2651,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2613,11 +2662,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, base_url, True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2626,12 +2676,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.contents is not None and "contents" not in r: r["contents"] = save( - self.contents, top=False, base_url=base_url, relative_uris=relative_uris + self.contents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2640,6 +2695,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -2784,7 +2840,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `location` field is not valid because:", + "the 'location' field is not valid because:", SourceLine(_doc, "location", str), [e], ) @@ -2802,7 +2858,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `path` field is not valid because:", + "the 'path' field is not valid because:", SourceLine(_doc, "path", str), [e], ) @@ -2820,7 +2876,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `basename` field is not valid because:", + "the 'basename' field is not valid because:", SourceLine(_doc, "basename", str), [e], ) @@ -2838,7 +2894,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `listing` field is not valid because:", + "the 'listing' field is not valid because:", SourceLine(_doc, "listing", str), [e], ) @@ -2882,12 +2938,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -2922,23 +2980,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2947,11 +3008,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) r["location"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2960,11 +3022,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, 
base_url, False, None, relative_uris) r["path"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2973,12 +3036,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.basename is not None and "basename" not in r: r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2987,12 +3055,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.listing is not None and "listing" not in r: r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3001,6 +3074,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3097,7 +3171,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -3138,12 +3212,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -3176,23 +3252,26 @@ def 
save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3201,6 +3280,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3208,8 +3288,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3218,6 +3299,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3331,7 +3413,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -3358,7 +3440,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -3375,7 +3457,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -3391,7 +3473,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -3409,7 +3491,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -3427,7 +3509,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -3445,7 +3527,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -3463,7 +3545,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -3481,7 +3563,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -3531,12 +3613,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -3569,23 +3653,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), 
top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3594,11 +3681,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3607,6 +3695,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -3614,8 +3703,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3624,6 +3714,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3631,8 +3722,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3641,6 +3733,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3648,8 +3741,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - 
max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3658,6 +3752,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -3665,8 +3760,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3675,6 +3771,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -3682,8 +3779,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3692,13 +3790,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri( self.format, str(self.name), True, None, relative_uris ) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3707,6 +3806,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3714,8 +3814,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3724,6 +3825,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if 
self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -3731,8 +3833,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3741,6 +3844,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3830,7 +3934,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -3857,7 +3961,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -3874,7 +3978,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -3890,7 +3994,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -3908,7 +4012,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -3954,12 +4058,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key 
in keys: if isinstance(doc, CommentedMap): @@ -3992,23 +4098,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4017,11 +4126,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4030,6 +4140,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -4037,8 +4148,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4047,6 +4159,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4054,8 +4167,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4064,6 +4178,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4071,8 +4186,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4081,6 +4197,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4088,8 +4205,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4098,6 +4216,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4175,7 +4294,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -4201,7 +4320,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -4216,7 +4335,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4232,7 +4351,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -4250,7 +4369,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - 
"the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -4296,12 +4415,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -4334,23 +4455,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4359,11 +4483,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4372,13 +4497,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4387,6 +4513,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4394,8 +4521,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4404,6 +4532,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4411,8 +4540,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4421,6 +4551,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4428,8 +4559,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4438,6 +4570,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4515,7 +4648,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -4541,7 +4674,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -4556,7 +4689,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid 
because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4572,7 +4705,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -4590,7 +4723,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -4636,12 +4769,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -4674,23 +4809,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4699,11 +4837,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4712,6 +4851,7 @@ def save( cols=cols, min_col=min_col, 
max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -4719,8 +4859,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4729,6 +4870,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4736,8 +4878,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4746,6 +4889,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4753,8 +4897,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4763,6 +4908,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4770,8 +4916,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4780,6 +4927,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4873,7 +5021,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 
'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -4900,7 +5048,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -4917,7 +5065,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4933,7 +5081,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -4951,7 +5099,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -4969,7 +5117,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -4987,7 +5135,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -5035,12 +5183,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -5073,23 +5223,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if 
getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5098,11 +5251,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5111,6 +5265,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5118,8 +5273,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5128,6 +5284,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5135,8 +5292,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5145,6 +5303,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5152,8 +5311,9 @@ def save( 
top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5162,6 +5322,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -5169,8 +5330,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5179,6 +5341,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -5186,8 +5349,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5196,13 +5360,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri( self.format, str(self.name), True, None, relative_uris ) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5211,6 +5376,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -5290,7 +5456,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -5317,7 +5483,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -5334,7 +5500,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -5350,7 +5516,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -5368,7 +5534,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -5414,12 +5580,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -5452,23 +5620,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5477,11 +5648,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if 
self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5490,6 +5662,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -5497,8 +5670,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5507,6 +5681,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5514,8 +5689,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5524,6 +5700,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5531,8 +5708,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5541,6 +5719,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5548,8 +5727,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5558,6 
+5738,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -5635,7 +5816,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -5661,7 +5842,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -5676,7 +5857,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -5692,7 +5873,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -5710,7 +5891,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -5756,12 +5937,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -5794,23 +5977,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + 
inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5819,11 +6005,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5832,13 +6019,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5847,6 +6035,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5854,8 +6043,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5864,6 +6054,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5871,8 +6062,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5881,6 +6073,7 @@ 
def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5888,8 +6081,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5898,6 +6092,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -5975,7 +6170,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -6001,7 +6196,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -6016,7 +6211,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -6032,7 +6227,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -6050,7 +6245,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -6096,12 +6291,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = 
copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6134,23 +6331,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6159,11 +6359,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6172,6 +6373,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -6179,8 +6381,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6189,6 +6392,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -6196,8 +6400,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ 
-6206,6 +6411,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -6213,8 +6419,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6223,6 +6430,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -6230,8 +6438,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6240,6 +6449,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6352,7 +6562,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", + "the 'expressionLib' field is not valid because:", SourceLine(_doc, "expressionLib", str), [e], ) @@ -6395,12 +6605,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6435,23 +6647,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned 
value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6460,6 +6675,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6467,8 +6683,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6477,6 +6694,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6559,7 +6777,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `types` field is not valid because:", + "the 'types' field is not valid because:", SourceLine(_doc, "types", str), [e], ) @@ -6598,12 +6816,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6638,23 +6858,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + 
len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6663,12 +6886,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.types is not None and "types" not in r: r["types"] = save( - self.types, top=False, base_url=base_url, relative_uris=relative_uris + self.types, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6677,6 +6905,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6743,7 +6972,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `pattern` field is not valid because:", + "the 'pattern' field is not valid because:", SourceLine(_doc, "pattern", str), [e], ) @@ -6759,7 +6988,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `required` field is not valid because:", + "the 'required' field is not valid because:", SourceLine(_doc, "required", str), [e], ) @@ -6801,12 +7030,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6839,23 +7070,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + 
inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6864,12 +7098,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.pattern is not None and "pattern" not in r: r["pattern"] = save( - self.pattern, top=False, base_url=base_url, relative_uris=relative_uris + self.pattern, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6878,12 +7117,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.required is not None and "required" not in r: r["required"] = save( - self.required, top=False, base_url=base_url, relative_uris=relative_uris + self.required, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6892,6 +7136,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6968,7 +7213,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -7011,12 +7256,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = 
CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7051,23 +7298,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7076,6 +7326,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -7083,8 +7334,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7093,6 +7345,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7166,7 +7419,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envName` field is not valid because:", + "the 'envName' field is not valid because:", SourceLine(_doc, "envName", str), [e], ) @@ -7181,7 +7434,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envValue` field is not valid because:", + "the 'envValue' field is not valid because:", SourceLine(_doc, "envValue", str), [e], ) @@ -7221,12 +7474,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: 
Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7259,23 +7514,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7284,12 +7542,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.envName is not None and "envName" not in r: r["envName"] = save( - self.envName, top=False, base_url=base_url, relative_uris=relative_uris + self.envName, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7298,12 +7561,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.envValue is not None and "envValue" not in r: r["envValue"] = save( - self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + self.envValue, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7312,6 +7580,7 @@ def save( cols=cols, 
min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7444,7 +7713,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -7462,7 +7731,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `position` field is not valid because:", + "the 'position' field is not valid because:", SourceLine(_doc, "position", str), [e], ) @@ -7480,7 +7749,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `prefix` field is not valid because:", + "the 'prefix' field is not valid because:", SourceLine(_doc, "prefix", str), [e], ) @@ -7498,7 +7767,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `separate` field is not valid because:", + "the 'separate' field is not valid because:", SourceLine(_doc, "separate", str), [e], ) @@ -7516,7 +7785,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `itemSeparator` field is not valid because:", + "the 'itemSeparator' field is not valid because:", SourceLine(_doc, "itemSeparator", str), [e], ) @@ -7534,7 +7803,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", + "the 'valueFrom' field is not valid because:", SourceLine(_doc, "valueFrom", str), [e], ) @@ -7552,7 +7821,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `shellQuote` field is not valid because:", + "the 'shellQuote' field is not valid because:", SourceLine(_doc, "shellQuote", str), [e], ) @@ -7599,12 +7868,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: 
Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7637,23 +7908,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7662,6 +7936,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -7669,8 +7944,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7679,12 +7955,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.position is not None and "position" not in r: r["position"] = save( - self.position, top=False, base_url=base_url, relative_uris=relative_uris + self.position, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7693,12 +7974,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.prefix is not None and 
"prefix" not in r: r["prefix"] = save( - self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + self.prefix, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7707,12 +7993,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.separate is not None and "separate" not in r: r["separate"] = save( - self.separate, top=False, base_url=base_url, relative_uris=relative_uris + self.separate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7721,6 +8012,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.itemSeparator is not None and "itemSeparator" not in r: r["itemSeparator"] = save( @@ -7728,8 +8020,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7738,6 +8031,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -7745,8 +8039,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7755,6 +8050,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.shellQuote is not None and "shellQuote" not in r: r["shellQuote"] = save( @@ -7762,8 +8058,9 @@ def save( top=False, 
base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7772,6 +8069,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7870,7 +8168,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -7888,7 +8186,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -7906,7 +8204,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `glob` field is not valid because:", + "the 'glob' field is not valid because:", SourceLine(_doc, "glob", str), [e], ) @@ -7924,7 +8222,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputEval` field is not valid because:", + "the 'outputEval' field is not valid because:", SourceLine(_doc, "outputEval", str), [e], ) @@ -7968,12 +8266,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8006,23 +8306,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys 
+ [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8031,6 +8334,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8038,8 +8342,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8048,6 +8353,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -8055,8 +8361,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8065,12 +8372,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.glob is not None and "glob" not in r: r["glob"] = save( - self.glob, top=False, base_url=base_url, relative_uris=relative_uris + self.glob, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8079,6 +8391,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputEval is not None and "outputEval" not in r: r["outputEval"] = save( @@ -8086,8 +8399,9 @@ def save( 
top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8096,6 +8410,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -8159,7 +8474,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -8200,12 +8515,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8238,23 +8555,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8263,6 +8583,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8270,8 +8591,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = 
add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8280,6 +8602,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -8385,7 +8708,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -8412,7 +8735,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -8429,7 +8752,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -8445,7 +8768,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -8463,7 +8786,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -8481,7 +8804,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -8499,7 +8822,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -8517,7 +8840,7 @@ def fromDoc( except 
ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -8535,7 +8858,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -8553,7 +8876,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -8606,12 +8929,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8644,23 +8969,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8669,11 +8997,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - 
max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8682,6 +9011,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -8689,8 +9019,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8699,6 +9030,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -8706,8 +9038,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8716,6 +9049,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -8723,8 +9057,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8733,6 +9068,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -8740,8 +9076,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8750,6 +9087,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) 
if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -8757,8 +9095,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8767,13 +9106,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri( self.format, str(self.name), True, None, relative_uris ) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8782,6 +9122,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8789,8 +9130,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8799,6 +9141,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -8806,8 +9149,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8816,6 +9160,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8823,8 +9168,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + 
max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8833,6 +9179,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -8930,7 +9277,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -8957,7 +9304,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -8974,7 +9321,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -8990,7 +9337,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -9008,7 +9355,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -9026,7 +9373,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -9075,12 +9422,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = 
copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9113,23 +9462,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9138,11 +9490,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9151,6 +9504,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -9158,8 +9512,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9168,6 +9523,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9175,8 +9531,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9185,6 +9542,7 @@ def save( cols=cols, 
min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9192,8 +9550,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9202,6 +9561,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9209,8 +9569,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9219,6 +9580,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9226,8 +9588,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9236,6 +9599,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -9325,7 +9689,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -9351,7 +9715,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -9366,7 +9730,7 @@ def fromDoc( except ValidationException as e: 
_errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -9382,7 +9746,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -9400,7 +9764,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -9418,7 +9782,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -9467,12 +9831,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9505,23 +9871,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9530,11 +9899,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9543,13 +9913,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9558,6 +9929,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9565,8 +9937,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9575,6 +9948,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9582,8 +9956,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9592,6 +9967,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9599,8 +9975,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9609,6 +9986,7 @@ def 
save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9616,8 +9994,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9626,6 +10005,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -9710,7 +10090,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -9736,7 +10116,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -9751,7 +10131,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -9767,7 +10147,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -9785,7 +10165,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -9803,7 +10183,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, 
"inputBinding", str), [e], ) @@ -9852,12 +10232,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9890,23 +10272,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9915,11 +10300,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9928,6 +10314,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -9935,8 +10322,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9945,6 +10333,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None 
and "type" not in r: r["type"] = save( @@ -9952,8 +10341,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9962,6 +10352,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9969,8 +10360,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9979,6 +10371,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9986,8 +10379,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9996,6 +10390,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -10003,8 +10398,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10013,6 +10409,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -10110,7 +10507,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, 
"name", str), [e], ) @@ -10137,7 +10534,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -10154,7 +10551,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -10170,7 +10567,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -10188,7 +10585,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -10206,7 +10603,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -10224,7 +10621,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -10242,7 +10639,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -10293,12 +10690,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = 
copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -10331,23 +10730,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10356,11 +10758,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10369,6 +10772,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10376,8 +10780,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10386,6 +10791,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10393,8 +10799,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -10403,6 +10810,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10410,8 +10818,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10420,6 +10829,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -10427,8 +10837,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10437,6 +10848,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -10444,8 +10856,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10454,13 +10867,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri( self.format, str(self.name), True, None, relative_uris ) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10469,6 +10883,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( 
@@ -10476,8 +10891,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10486,6 +10902,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -10574,7 +10991,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -10601,7 +11018,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -10618,7 +11035,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -10634,7 +11051,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -10652,7 +11069,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -10700,12 +11117,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -10738,23 +11157,26 
@@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10763,11 +11185,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10776,6 +11199,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -10783,8 +11207,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10793,6 +11218,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10800,8 +11226,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10810,6 +11237,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label 
is not None and "label" not in r: r["label"] = save( @@ -10817,8 +11245,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10827,6 +11256,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10834,8 +11264,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10844,6 +11275,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -10921,7 +11353,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -10947,7 +11379,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -10962,7 +11394,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -10978,7 +11410,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -10996,7 +11428,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid 
because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -11044,12 +11476,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -11082,23 +11516,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11107,11 +11544,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11120,13 +11558,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11135,6 +11574,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is 
not None and "type" not in r: r["type"] = save( @@ -11142,8 +11582,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11152,6 +11593,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11159,8 +11601,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11169,6 +11612,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11176,8 +11620,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11186,6 +11631,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -11263,7 +11709,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -11289,7 +11735,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -11304,7 +11750,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid 
because:", SourceLine(_doc, "type", str), [e], ) @@ -11320,7 +11766,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -11338,7 +11784,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -11386,12 +11832,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -11424,23 +11872,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11449,11 +11900,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11462,6 +11914,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -11469,8 +11922,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11479,6 +11933,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11486,8 +11941,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11496,6 +11952,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11503,8 +11960,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11513,6 +11971,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11520,8 +11979,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11530,6 +11990,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -11643,7 +12104,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' 
field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -11670,7 +12131,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -11688,7 +12149,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -11706,7 +12167,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -11724,7 +12185,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -11742,7 +12203,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -11760,7 +12221,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -11778,7 +12239,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -11796,7 +12257,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not 
valid because:", SourceLine(_doc, "default", str), [e], ) @@ -11813,7 +12274,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -11829,7 +12290,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -11881,12 +12342,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -11903,22 +12366,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -11943,23 +12401,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save 
the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11968,11 +12429,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11981,6 +12443,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11988,8 +12451,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11998,6 +12462,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -12005,8 +12470,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12015,6 +12481,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -12022,8 +12489,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12032,12 
+12500,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12046,11 +12519,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12059,6 +12533,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -12066,8 +12541,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12076,6 +12552,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -12083,8 +12560,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12093,6 +12571,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -12100,8 +12579,9 @@ def 
save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12110,12 +12590,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12124,6 +12609,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -12131,8 +12617,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12141,6 +12628,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -12256,7 +12744,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -12283,7 +12771,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -12301,7 +12789,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' 
field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -12319,7 +12807,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -12337,7 +12825,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -12355,7 +12843,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -12372,7 +12860,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -12388,7 +12876,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -12439,12 +12927,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -12461,22 +12951,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - 
temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -12501,23 +12986,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12526,11 +13014,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12539,6 +13028,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -12546,8 +13036,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12556,6 +13047,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in 
r: r["secondaryFiles"] = save( @@ -12563,8 +13055,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12573,6 +13066,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -12580,8 +13074,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12590,12 +13085,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12604,11 +13104,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12617,12 +13118,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - 
max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12631,6 +13137,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -12638,8 +13145,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12648,6 +13156,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -12800,7 +13309,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -12827,7 +13336,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -12845,7 +13354,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -12862,7 +13371,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -12877,7 +13386,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -12893,7 +13402,7 @@ def fromDoc( except ValidationException as e: 
_errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -12911,7 +13420,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -12929,7 +13438,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -12947,7 +13456,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `baseCommand` field is not valid because:", + "the 'baseCommand' field is not valid because:", SourceLine(_doc, "baseCommand", str), [e], ) @@ -12965,7 +13474,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `arguments` field is not valid because:", + "the 'arguments' field is not valid because:", SourceLine(_doc, "arguments", str), [e], ) @@ -12983,7 +13492,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stdin` field is not valid because:", + "the 'stdin' field is not valid because:", SourceLine(_doc, "stdin", str), [e], ) @@ -13001,7 +13510,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stderr` field is not valid because:", + "the 'stderr' field is not valid because:", SourceLine(_doc, "stderr", str), [e], ) @@ -13019,7 +13528,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stdout` field is not valid because:", + "the 'stdout' field is not valid because:", SourceLine(_doc, "stdout", str), [e], ) @@ -13037,7 +13546,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `successCodes` field is not valid because:", + "the 'successCodes' field is not valid because:", SourceLine(_doc, "successCodes", str), [e], ) @@ -13055,7 +13564,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `temporaryFailCodes` field is not valid because:", + "the 'temporaryFailCodes' field is not valid because:", SourceLine(_doc, "temporaryFailCodes", str), [e], ) @@ -13073,7 +13582,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `permanentFailCodes` field is not valid because:", + "the 'permanentFailCodes' field is not valid because:", SourceLine(_doc, "permanentFailCodes", str), [e], ) @@ -13130,12 +13639,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -13152,22 +13663,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -13194,23 +13700,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, 
base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13219,11 +13728,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13232,6 +13742,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -13239,8 +13750,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13249,12 +13761,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13263,6 +13780,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -13270,8 +13788,9 @@ def save( top=False, base_url=str(self.id), 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13280,6 +13799,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -13287,8 +13807,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13297,6 +13818,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -13304,8 +13826,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13314,6 +13837,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -13321,8 +13845,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13331,13 +13856,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13346,6 +13872,7 @@ def save( cols=cols, min_col=min_col, 
max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.baseCommand is not None and "baseCommand" not in r: r["baseCommand"] = save( @@ -13353,8 +13880,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13363,6 +13891,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.arguments is not None and "arguments" not in r: r["arguments"] = save( @@ -13370,8 +13899,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13380,6 +13910,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stdin is not None and "stdin" not in r: r["stdin"] = save( @@ -13387,8 +13918,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13397,6 +13929,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stderr is not None and "stderr" not in r: r["stderr"] = save( @@ -13404,8 +13937,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13414,6 +13948,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stdout is not None and "stdout" not in r: r["stdout"] = save( @@ -13421,8 +13956,9 @@ def save( top=False, base_url=str(self.id), 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13431,6 +13967,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.successCodes is not None and "successCodes" not in r: r["successCodes"] = save( @@ -13438,8 +13975,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13448,6 +13986,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: r["temporaryFailCodes"] = save( @@ -13455,8 +13994,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13465,6 +14005,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.permanentFailCodes is not None and "permanentFailCodes" not in r: r["permanentFailCodes"] = save( @@ -13472,8 +14013,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13482,6 +14024,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -13653,7 +14196,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerPull` field is not valid because:", + "the 'dockerPull' field is not valid because:", SourceLine(_doc, "dockerPull", 
str), [e], ) @@ -13671,7 +14214,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerLoad` field is not valid because:", + "the 'dockerLoad' field is not valid because:", SourceLine(_doc, "dockerLoad", str), [e], ) @@ -13689,7 +14232,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerFile` field is not valid because:", + "the 'dockerFile' field is not valid because:", SourceLine(_doc, "dockerFile", str), [e], ) @@ -13707,7 +14250,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerImport` field is not valid because:", + "the 'dockerImport' field is not valid because:", SourceLine(_doc, "dockerImport", str), [e], ) @@ -13725,7 +14268,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerImageId` field is not valid because:", + "the 'dockerImageId' field is not valid because:", SourceLine(_doc, "dockerImageId", str), [e], ) @@ -13743,7 +14286,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerOutputDirectory` field is not valid because:", + "the 'dockerOutputDirectory' field is not valid because:", SourceLine(_doc, "dockerOutputDirectory", str), [e], ) @@ -13789,12 +14332,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -13829,23 +14374,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + 
inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13854,6 +14402,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -13861,8 +14410,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13871,6 +14421,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerLoad is not None and "dockerLoad" not in r: r["dockerLoad"] = save( @@ -13878,8 +14429,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13888,6 +14440,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerFile is not None and "dockerFile" not in r: r["dockerFile"] = save( @@ -13895,8 +14448,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13905,6 +14459,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerImport is not None and "dockerImport" not in r: r["dockerImport"] = save( @@ -13912,8 +14467,9 @@ def save( top=False, base_url=base_url, 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13922,6 +14478,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerImageId is not None and "dockerImageId" not in r: r["dockerImageId"] = save( @@ -13929,8 +14486,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13939,6 +14497,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: r["dockerOutputDirectory"] = save( @@ -13946,8 +14505,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13956,6 +14516,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14039,7 +14600,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `packages` field is not valid because:", + "the 'packages' field is not valid because:", SourceLine(_doc, "packages", str), [e], ) @@ -14078,12 +14639,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14118,23 +14681,26 @@ def save( if isinstance(key, str): if hasattr(self, key): 
if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14143,12 +14709,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.packages is not None and "packages" not in r: r["packages"] = save( - self.packages, top=False, base_url=base_url, relative_uris=relative_uris + self.packages, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14157,6 +14728,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14227,7 +14799,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `package` field is not valid because:", + "the 'package' field is not valid because:", SourceLine(_doc, "package", str), [e], ) @@ -14243,7 +14815,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `version` field is not valid because:", + "the 'version' field is not valid because:", SourceLine(_doc, "version", str), [e], ) @@ -14261,7 +14833,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `specs` field is not valid because:", + "the 'specs' field is not valid because:", SourceLine(_doc, "specs", str), [e], ) @@ -14304,12 +14876,14 @@ def save( base_url: str 
= "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14342,23 +14916,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14367,12 +14944,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.package is not None and "package" not in r: r["package"] = save( - self.package, top=False, base_url=base_url, relative_uris=relative_uris + self.package, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14381,12 +14963,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.version is not None and "version" not in r: r["version"] = save( - self.version, top=False, base_url=base_url, relative_uris=relative_uris + self.version, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14395,11 
+14982,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.specs is not None and "specs" not in r: u = save_relative_uri(self.specs, base_url, False, None, relative_uris) r["specs"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14408,6 +14996,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14487,7 +15076,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `entryname` field is not valid because:", + "the 'entryname' field is not valid because:", SourceLine(_doc, "entryname", str), [e], ) @@ -14504,7 +15093,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `entry` field is not valid because:", + "the 'entry' field is not valid because:", SourceLine(_doc, "entry", str), [e], ) @@ -14520,7 +15109,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `writable` field is not valid because:", + "the 'writable' field is not valid because:", SourceLine(_doc, "writable", str), [e], ) @@ -14563,12 +15152,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14601,23 +15192,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, 
just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14626,6 +15220,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -14633,8 +15228,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14643,12 +15239,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.entry is not None and "entry" not in r: r["entry"] = save( - self.entry, top=False, base_url=base_url, relative_uris=relative_uris + self.entry, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14657,12 +15258,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.writable is not None and "writable" not in r: r["writable"] = save( - self.writable, top=False, base_url=base_url, relative_uris=relative_uris + self.writable, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14671,6 +15277,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14742,7 +15349,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - 
"the `listing` field is not valid because:", + "the 'listing' field is not valid because:", SourceLine(_doc, "listing", str), [e], ) @@ -14783,12 +15390,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14823,23 +15432,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14848,12 +15460,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.listing is not None and "listing" not in r: r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14862,6 +15479,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14935,7 +15553,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envDef` field is not valid because:", + "the 'envDef' field is 
not valid because:", SourceLine(_doc, "envDef", str), [e], ) @@ -14974,12 +15592,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15014,23 +15634,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15039,12 +15662,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( - self.envDef, top=False, base_url=base_url, relative_uris=relative_uris + self.envDef, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15053,6 +15681,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -15152,12 +15781,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) 
keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15192,23 +15823,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15217,6 +15851,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -15345,7 +15980,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `coresMin` field is not valid because:", + "the 'coresMin' field is not valid because:", SourceLine(_doc, "coresMin", str), [e], ) @@ -15363,7 +15998,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `coresMax` field is not valid because:", + "the 'coresMax' field is not valid because:", SourceLine(_doc, "coresMax", str), [e], ) @@ -15381,7 +16016,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `ramMin` field is not valid because:", + "the 'ramMin' field is not valid because:", SourceLine(_doc, "ramMin", str), [e], ) @@ -15399,7 +16034,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `ramMax` field is not valid because:", + "the 'ramMax' field is not valid because:", SourceLine(_doc, "ramMax", str), [e], ) @@ -15417,7 +16052,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `tmpdirMin` field is not valid because:", + "the 'tmpdirMin' field is not valid because:", SourceLine(_doc, "tmpdirMin", str), [e], ) @@ -15435,7 +16070,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `tmpdirMax` field is not valid because:", + "the 'tmpdirMax' field is not valid because:", SourceLine(_doc, "tmpdirMax", str), [e], ) @@ -15453,7 +16088,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outdirMin` field is not valid because:", + "the 'outdirMin' field is not valid because:", SourceLine(_doc, "outdirMin", str), [e], ) @@ -15471,7 +16106,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outdirMax` field is not valid because:", + "the 'outdirMax' field is not valid because:", SourceLine(_doc, "outdirMax", str), [e], ) @@ -15519,12 +16154,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15559,23 +16196,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15584,12 +16224,17 @@ def save( cols=cols, min_col=min_col, 
max_len=max_len, + inserted_line_info=inserted_line_info ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( - self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + self.coresMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15598,12 +16243,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.coresMax is not None and "coresMax" not in r: r["coresMax"] = save( - self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris + self.coresMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15612,12 +16262,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.ramMin is not None and "ramMin" not in r: r["ramMin"] = save( - self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + self.ramMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15626,12 +16281,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.ramMax is not None and "ramMax" not in r: r["ramMax"] = save( - self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + self.ramMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15640,6 +16300,7 @@ def save( 
cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.tmpdirMin is not None and "tmpdirMin" not in r: r["tmpdirMin"] = save( @@ -15647,8 +16308,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15657,6 +16319,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.tmpdirMax is not None and "tmpdirMax" not in r: r["tmpdirMax"] = save( @@ -15664,8 +16327,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15674,6 +16338,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outdirMin is not None and "outdirMin" not in r: r["outdirMin"] = save( @@ -15681,8 +16346,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15691,6 +16357,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outdirMax is not None and "outdirMax" not in r: r["outdirMax"] = save( @@ -15698,8 +16365,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15708,6 +16376,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -15802,7 +16471,7 @@ def fromDoc( except ValidationException as e: 
_errors__.append( ValidationException( - "the `enableReuse` field is not valid because:", + "the 'enableReuse' field is not valid because:", SourceLine(_doc, "enableReuse", str), [e], ) @@ -15841,12 +16510,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15881,23 +16552,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15906,6 +16580,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.enableReuse is not None and "enableReuse" not in r: r["enableReuse"] = save( @@ -15913,8 +16588,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15923,6 +16599,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16012,7 +16689,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `networkAccess` field is not valid because:", + "the 
'networkAccess' field is not valid because:", SourceLine(_doc, "networkAccess", str), [e], ) @@ -16051,12 +16728,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16091,23 +16770,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16116,6 +16798,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.networkAccess is not None and "networkAccess" not in r: r["networkAccess"] = save( @@ -16123,8 +16806,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16133,6 +16817,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16237,7 +16922,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inplaceUpdate` field is not valid because:", + "the 'inplaceUpdate' field is not valid because:", SourceLine(_doc, "inplaceUpdate", str), [e], ) @@ 
-16278,12 +16963,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16318,23 +17005,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16343,6 +17033,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.inplaceUpdate is not None and "inplaceUpdate" not in r: r["inplaceUpdate"] = save( @@ -16350,8 +17041,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16360,6 +17052,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16440,7 +17133,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `timelimit` field is not valid because:", + "the 'timelimit' field is not valid because:", SourceLine(_doc, "timelimit", str), [e], ) @@ -16479,12 +17172,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = 
None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16519,23 +17214,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16544,6 +17242,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.timelimit is not None and "timelimit" not in r: r["timelimit"] = save( @@ -16551,8 +17250,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16561,6 +17261,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16654,7 +17355,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -16681,7 +17382,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -16699,7 
+17400,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -16717,7 +17418,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -16735,7 +17436,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -16753,7 +17454,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -16770,7 +17471,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -16818,12 +17519,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16840,22 +17543,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - 
doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -16880,23 +17578,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16905,11 +17606,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16918,6 +17620,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -16925,8 +17628,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16935,6 +17639,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -16942,8 +17647,9 @@ def save( 
top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16952,6 +17658,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -16959,8 +17666,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16969,12 +17677,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16983,11 +17696,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16996,12 +17710,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( 
old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17010,6 +17729,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -17121,7 +17841,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -17148,7 +17868,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -17166,7 +17886,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -17184,7 +17904,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -17202,7 +17922,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -17220,7 +17940,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -17238,7 +17958,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -17256,7 +17976,7 @@ def fromDoc( except ValidationException as e: 
_errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -17274,7 +17994,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -17291,7 +18011,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -17307,7 +18027,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -17361,12 +18081,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -17383,22 +18105,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ 
-17423,23 +18140,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17448,11 +18168,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17461,6 +18182,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -17468,8 +18190,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17478,6 +18201,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -17485,8 +18209,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17495,6 +18220,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -17502,8 +18228,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17512,12 +18239,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17526,11 +18258,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17539,6 +18272,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -17546,8 +18280,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17556,6 +18291,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -17563,8 +18299,9 @@ def save( top=False, base_url=str(self.id), 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17573,6 +18310,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -17580,8 +18318,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17590,12 +18329,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17604,6 +18348,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -17611,8 +18356,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17621,6 +18367,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -17754,7 +18501,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -17781,7 +18528,7 @@ 
def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -17799,7 +18546,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -17816,7 +18563,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -17831,7 +18578,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -17847,7 +18594,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -17865,7 +18612,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -17883,7 +18630,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -17900,7 +18647,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `expression` field is not valid because:", + "the 'expression' field is not valid because:", SourceLine(_doc, "expression", str), [e], ) @@ -17948,12 +18695,14 @@ def save( base_url: str = "", 
relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -17970,22 +18719,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -18012,23 +18756,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18037,11 +18784,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -18050,6 +18798,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18057,8 +18806,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18067,12 +18817,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18081,6 +18836,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -18088,8 +18844,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18098,6 +18855,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -18105,8 +18863,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18115,6 +18874,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not 
None and "requirements" not in r: r["requirements"] = save( @@ -18122,8 +18882,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18132,6 +18893,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -18139,8 +18901,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18149,13 +18912,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18164,6 +18928,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.expression is not None and "expression" not in r: r["expression"] = save( @@ -18171,8 +18936,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18181,6 +18947,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -18303,7 +19070,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ 
-18330,7 +19097,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -18348,7 +19115,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -18366,7 +19133,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -18384,7 +19151,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -18402,7 +19169,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -18420,7 +19187,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputSource` field is not valid because:", + "the 'outputSource' field is not valid because:", SourceLine(_doc, "outputSource", str), [e], ) @@ -18438,7 +19205,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", + "the 'linkMerge' field is not valid because:", SourceLine(_doc, "linkMerge", str), [e], ) @@ -18455,7 +19222,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -18505,12 +19272,14 @@ def 
save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -18527,22 +19296,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -18567,23 +19331,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18592,11 +19359,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -18605,6 +19373,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18612,8 +19381,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18622,6 +19392,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -18629,8 +19400,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18639,6 +19411,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -18646,8 +19419,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18656,12 +19430,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18670,11 +19449,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18683,13 +19463,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputSource is not None and "outputSource" not in r: u = save_relative_uri( self.outputSource, str(self.id), False, 1, relative_uris ) r["outputSource"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18698,6 +19479,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -18705,8 +19487,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18715,12 +19498,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18729,6 +19517,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -18889,7 +19678,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", 
SourceLine(_doc, "id", str), [e], ) @@ -18916,7 +19705,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `source` field is not valid because:", + "the 'source' field is not valid because:", SourceLine(_doc, "source", str), [e], ) @@ -18934,7 +19723,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", + "the 'linkMerge' field is not valid because:", SourceLine(_doc, "linkMerge", str), [e], ) @@ -18952,7 +19741,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -18970,7 +19759,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -18988,7 +19777,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -19006,7 +19795,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -19024,7 +19813,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", + "the 'valueFrom' field is not valid because:", SourceLine(_doc, "valueFrom", str), [e], ) @@ -19073,12 +19862,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() 
doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -19095,22 +19886,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -19135,23 +19921,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19160,11 +19949,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19173,11 +19963,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.source is not None and 
"source" not in r: u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) r["source"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19186,6 +19977,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -19193,8 +19985,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19203,6 +19996,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -19210,8 +20004,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19220,6 +20015,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -19227,8 +20023,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19237,6 +20034,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -19244,8 +20042,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -19254,6 +20053,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -19261,8 +20061,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19271,6 +20072,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -19278,8 +20080,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19288,6 +20091,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -19374,7 +20178,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -19423,12 +20227,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -19445,22 +20251,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - 
temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -19485,23 +20286,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19510,11 +20314,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19523,6 +20328,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -19687,7 +20493,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -19714,7 +20520,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", 
SourceLine(_doc, "label", str), [e], ) @@ -19732,7 +20538,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -19749,7 +20555,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `in` field is not valid because:", + "the 'in' field is not valid because:", SourceLine(_doc, "in", str), [e], ) @@ -19764,7 +20570,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `out` field is not valid because:", + "the 'out' field is not valid because:", SourceLine(_doc, "out", str), [e], ) @@ -19780,7 +20586,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -19798,7 +20604,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -19806,7 +20612,7 @@ def fromDoc( else: hints = None - subscope_baseuri = expand_url("run", baseuri, loadingOptions, True) + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) try: run = load_field( _doc.get("run"), @@ -19817,7 +20623,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `run` field is not valid because:", + "the 'run' field is not valid because:", SourceLine(_doc, "run", str), [e], ) @@ -19833,7 +20639,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `scatter` field is not valid because:", + "the 'scatter' field is not valid because:", SourceLine(_doc, "scatter", str), [e], ) @@ -19851,7 +20657,7 @@ def fromDoc( except 
ValidationException as e: _errors__.append( ValidationException( - "the `scatterMethod` field is not valid because:", + "the 'scatterMethod' field is not valid because:", SourceLine(_doc, "scatterMethod", str), [e], ) @@ -19902,12 +20708,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -19924,22 +20732,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -19964,23 +20767,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19989,11 +20795,12 @@ def save( cols=cols, min_col=min_col, 
max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20002,6 +20809,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -20009,8 +20817,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20019,12 +20828,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20033,12 +20847,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.in_ is not None and "in" not in r: r["in"] = save( - self.in_, top=False, base_url=str(self.id), relative_uris=relative_uris + self.in_, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20047,11 +20866,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.out is not None and "out" not in r: u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) r["out"] = u - max_len = add_kv( + 
max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20060,6 +20880,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -20067,8 +20888,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20077,6 +20899,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -20084,8 +20907,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20094,11 +20918,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.run is not None and "run" not in r: u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) r["run"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20107,11 +20932,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.scatter is not None and "scatter" not in r: u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) r["scatter"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20120,13 +20946,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.scatterMethod is not None and "scatterMethod" not in r: u = save_relative_uri( self.scatterMethod, str(self.id), False, None, 
relative_uris ) r["scatterMethod"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20135,6 +20962,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -20305,7 +21133,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -20332,7 +21160,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -20350,7 +21178,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -20367,7 +21195,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -20382,7 +21210,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -20398,7 +21226,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -20416,7 +21244,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ 
-20434,7 +21262,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -20451,7 +21279,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `steps` field is not valid because:", + "the 'steps' field is not valid because:", SourceLine(_doc, "steps", str), [e], ) @@ -20499,12 +21327,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -20521,22 +21351,17 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc["id"] = temp_id - temp_doc.lc.add_kv_line_col( - "id", - [ - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4, - ], - ) + temp_doc['id'] = temp_id + temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1], + doc.lc.data[temp_id][0], + doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -20563,23 +21388,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if 
len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20588,11 +21416,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20601,6 +21430,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -20608,8 +21438,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20618,12 +21449,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20632,6 +21468,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -20639,8 +21476,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20649,6 +21487,7 @@ def save( cols=cols, 
min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -20656,8 +21495,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20666,6 +21506,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -20673,8 +21514,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20683,6 +21525,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -20690,8 +21533,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20700,13 +21544,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20715,6 +21560,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.steps is not None and "steps" not in r: r["steps"] = save( @@ -20722,8 +21568,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20732,6 +21579,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -20839,12 +21687,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -20879,23 +21729,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20904,6 +21757,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -20998,12 +21852,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -21038,23 +21894,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key 
!= "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21063,6 +21922,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -21157,12 +22017,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -21197,23 +22059,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21222,6 +22087,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -21316,12 +22182,14 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, 
int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -21356,23 +22224,26 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21381,6 +22252,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -21655,7 +22527,23 @@ def save( ), "PrimitiveType", ) +""" +Salad data types are based on Avro schema declarations. Refer to the +[Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for +detailed information. + +null: no value +boolean: a binary value +int: 32-bit signed integer +long: 64-bit signed integer +float: single precision (32-bit) IEEE 754 floating-point number +double: double precision (64-bit) IEEE 754 floating-point number +string: Unicode character sequence +""" AnyLoader = _EnumLoader(("Any",), "Any") +""" +The **Any** type validates for any non-null value. +""" RecordFieldLoader = _RecordLoader(RecordField) RecordSchemaLoader = _RecordLoader(RecordSchema) EnumSchemaLoader = _RecordLoader(EnumSchema) @@ -21679,6 +22567,9 @@ def save( ), "CWLVersion", ) +""" +Version symbols for published CWL document versions. 
+""" CWLTypeLoader = _EnumLoader( ( "null", @@ -21693,6 +22584,11 @@ def save( ), "CWLType", ) +""" +Extends primitive types with the concept of a file and directory as a builtin type. +File: A File object +Directory: A Directory object +""" FileLoader = _RecordLoader(File) DirectoryLoader = _RecordLoader(Directory) LoadListingEnumLoader = _EnumLoader( @@ -21703,6 +22599,14 @@ def save( ), "LoadListingEnum", ) +""" +Specify the desired behavior for loading the `listing` field of +a Directory object for use by expressions. + +no_listing: Do not load the directory listing. +shallow_listing: Only load the top level listing, do not recurse into subdirectories. +deep_listing: Load the directory listing and recursively load all subdirectories as well. +""" ExpressionLoader = _ExpressionLoader(str) InputBindingLoader = _RecordLoader(InputBinding) InputRecordFieldLoader = _RecordLoader(InputRecordField) @@ -21732,8 +22636,115 @@ def save( CommandInputParameterLoader = _RecordLoader(CommandInputParameter) CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) stdinLoader = _EnumLoader(("stdin",), "stdin") +""" +Only valid as a `type` for a `CommandLineTool` input with no +`inputBinding` set. `stdin` must not be specified at the `CommandLineTool` +level. + +The following +``` +inputs: + an_input_name: + type: stdin +``` +is equivalent to +``` +inputs: + an_input_name: + type: File + streamable: true + +stdin: ${inputs.an_input_name.path} +``` +""" stdoutLoader = _EnumLoader(("stdout",), "stdout") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stdout + +stdout: a_stdout_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + +stdout: a_stdout_file +``` + +If there is no `stdout` name provided, a random filename will be created. 
+For example, the following +``` +outputs: + an_output_name: + type: stdout +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + +stdout: random_stdout_filenameABCDEFG +``` +""" stderrLoader = _EnumLoader(("stderr",), "stderr") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stderr + +stderr: a_stderr_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + +stderr: a_stderr_file +``` + +If there is no `stderr` name provided, a random filename will be created. +For example, the following +``` +outputs: + an_output_name: + type: stderr +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + +stderr: random_stderr_filenameABCDEFG +``` +""" CommandLineToolLoader = _RecordLoader(CommandLineTool) DockerRequirementLoader = _RecordLoader(DockerRequirement) SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) @@ -21757,6 +22768,9 @@ def save( ), "LinkMergeMethod", ) +""" +The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +""" WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) @@ -21768,6 +22782,9 @@ def save( ), "ScatterMethod", ) +""" +The scatter method, as described in [workflow step scatter](#WorkflowStep). 
+""" WorkflowStepLoader = _RecordLoader(WorkflowStep) WorkflowLoader = _RecordLoader(Workflow) SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) diff --git a/schema_salad/tests/cwl_v1_2.py b/schema_salad/tests/cwl_v1_2.py index 577cccdd8..c849c59bc 100644 --- a/schema_salad/tests/cwl_v1_2.py +++ b/schema_salad/tests/cwl_v1_2.py @@ -45,10 +45,7 @@ IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] - doc_line_info = CommentedMap() -inserted_line_info: Dict[int, int] = {} - class LoadingOptions: idx: IdxType @@ -138,9 +135,7 @@ def __init__( ) self.fetcher: Fetcher = DefaultFetcher({}, session) - self.cache = ( - self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - ) + self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} self.vocab = _vocab self.rvocab = _rvocab @@ -172,9 +167,7 @@ def graph(self) -> Graph: try: content = self.fetcher.fetch_text(fetchurl) except Exception as e: - _logger.warning( - "Could not load extension schema %s: %s", fetchurl, str(e) - ) + _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) continue newGraph = Graph() err_msg = "unknown error" @@ -187,9 +180,7 @@ def graph(self) -> Graph: except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: err_msg = str(e) else: - _logger.warning( - "Could not load extension schema %s: %s", fetchurl, err_msg - ) + _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) self.cache[key] = graph return graph @@ -233,20 +224,16 @@ def load_field(val, fieldtype, baseuri, loadingOptions): ) loadingOptions.imports.append(url) return result - elif "$include" in val: + if "$include" in val: if loadingOptions.fileuri is None: raise SchemaSaladException("Cannot load $import without fileuri") - url = loadingOptions.fetcher.urljoin( - loadingOptions.fileuri, val["$include"] - ) + url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) val = 
loadingOptions.fetcher.fetch_text(url) loadingOptions.includes.append(url) return fieldtype.load(val, baseuri, loadingOptions) -save_type = Optional[ - Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str] -] +save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] def add_kv( @@ -258,6 +245,7 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, + inserted_line_info: Dict[int, int] = {} ) -> int: """Add key value pair into Commented Map. @@ -290,7 +278,7 @@ def add_kv( ], ) inserted_line_info[line] = old_doc.lc.data[key][1] - return max_len + return max_len, inserted_line_info elif isinstance(val, (int, float, str)) and not isinstance( val, bool ): # If the value is hashable @@ -305,7 +293,7 @@ def add_kv( new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 cols[line] = col + len("id") + 2 - return max_len + return max_len, inserted_line_info elif isinstance(val, str): if val + "?" in line_numbers: line = line_numbers[val + "?"]["line"] @@ -318,7 +306,7 @@ def add_kv( new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 cols[line] = col + len("id") + 2 - return max_len + return max_len, inserted_line_info elif old_doc: if val in old_doc: index = old_doc.lc.data.index(val) @@ -344,7 +332,7 @@ def add_kv( key, [max_line, min_col, max_line, min_col + len(key) + 2] ) inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1 + return max_len + 1, inserted_line_info def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: @@ -402,6 +390,7 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> save_type: """Save a val of any type. 
@@ -425,7 +414,7 @@ def save( if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys + top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -443,17 +432,16 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info ) ) return r - # return [ - # save(v, top=False, base_url=base_url, relative_uris=relative_uris) - # for v in val - # ] + if isinstance(val, MutableMapping): newdict = CommentedMap() new_keys = keys for key in val: + if doc: if key in doc: newdict.lc.add_kv_line_col(key, doc.lc.data[key]) @@ -465,14 +453,10 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, + inserted_line_info=inserted_line_info, ) + return newdict - # newdict = {} - # for key in val: - # newdict[key] = save( - # val[key], top=False, base_url=base_url, relative_uris=relative_uris - # ) - # return newdict if val is None or isinstance(val, (int, float, bool, str)): return val raise Exception("Not Saveable: %s" % type(val)) @@ -529,10 +513,7 @@ def expand_url( split = urlsplit(url) if ( - ( - bool(split.scheme) - and split.scheme in loadingOptions.fetcher.supported_schemes() - ) + (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) or url.startswith("$(") or url.startswith("${") ): @@ -572,7 +553,7 @@ def expand_url( if url in loadingOptions.rvocab: return loadingOptions.rvocab[url] else: - raise ValidationException(f"Term '{url}' not in vocabulary") + raise ValidationException(f"Term {url!r} not in vocabulary") return url @@ -623,9 +604,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): errors = [] # type: List[SchemaSaladException] for i in range(0, len(doc)): try: - lf = load_field( - doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions - ) + lf = load_field(doc[i], 
_UnionLoader((self, self.items)), baseuri, loadingOptions) if isinstance(lf, MutableSequence): r.extend(lf) else: @@ -649,8 +628,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): # type: (Any, str, LoadingOptions, Optional[str]) -> Any if doc in self.symbols: return doc - else: - raise ValidationException(f"Expected one of {self.symbols}") + raise ValidationException(f"Expected one of {self.symbols}") def __repr__(self): # type: () -> str return self.name @@ -678,9 +656,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): new_dict["pattern"] = dict_copy.pop("pattern") else: raise ValidationException( - "Missing pattern in secondaryFiles specification entry: {}".format( - d - ) + f"Missing pattern in secondaryFiles specification entry: {d}" ) new_dict["required"] = ( dict_copy.pop("required") if "required" in dict_copy else None @@ -705,19 +681,13 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None): new_dict["pattern"] = doc_copy.pop("pattern") else: raise ValidationException( - "Missing pattern in secondaryFiles specification entry: {}".format( - doc - ) + f"Missing pattern in secondaryFiles specification entry: {doc}" ) - new_dict["required"] = ( - doc_copy.pop("required") if "required" in doc_copy else None - ) + new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None if len(doc_copy): raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - doc_copy - ) + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" ) r.append(new_dict) @@ -833,9 +803,7 @@ def resolve( if m: group1 = m.group(1) assert group1 is not None # nosec - first = expand_url( - group1, baseuri, loadingOptions, False, True, self.refScope - ) + first = expand_url(group1, baseuri, loadingOptions, False, True, self.refScope) second = third = None if bool(m.group(2)): second = {"type": "array", "items": first} @@ -944,11 +912,6 @@ def _document_load( addl_metadata=addl_metadata, 
) - # doc = { - # k: v - # for k, v in doc.items() - # if k not in ("$namespaces", "$schemas", "$base") - # } doc = copy.copy(doc) if "$namespaces" in doc: doc.pop("$namespaces") @@ -1000,10 +963,7 @@ def _document_load_by_url( doc_url, frg = urldefrag(url) text = loadingOptions.fetcher.fetch_text(doc_url) - if isinstance(text, bytes): - textIO = StringIO(text.decode("utf-8")) - else: - textIO = StringIO(text) + textIO = StringIO(text) textIO.name = str(doc_url) yaml = yaml_no_ts() result = yaml.load(textIO) @@ -1034,8 +994,7 @@ def file_uri(path, split_frag=False): # type: (str, bool) -> str frag = "" if urlpath.startswith("//"): return f"file:{urlpath}{frag}" - else: - return f"file://{urlpath}{frag}" + return f"file://{urlpath}{frag}" def prefix_url(url: str, namespaces: Dict[str, str]) -> str: @@ -1055,10 +1014,7 @@ def save_relative_uri( ) -> Any: """Convert any URI to a relative one, obeying the scoping rules.""" if isinstance(uri, MutableSequence): - return [ - save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) - for u in uri - ] + return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] elif isinstance(uri, str): if not relative_uris or uri == base_url: return uri @@ -1082,8 +1038,7 @@ def save_relative_uri( if urisplit.fragment.startswith(basefrag): return urisplit.fragment[len(basefrag) :] - else: - return urisplit.fragment + return urisplit.fragment return uri else: return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) @@ -1171,7 +1126,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -1198,7 +1153,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -1215,7 
+1170,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1256,13 +1211,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1302,6 +1259,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -1313,7 +1271,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1321,12 +1279,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1335,6 +1294,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -1342,8 +1302,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1352,6 +1313,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if 
self.type is not None and "type" not in r: r["type"] = save( @@ -1359,8 +1321,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1369,6 +1332,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1434,7 +1398,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -1451,7 +1415,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1490,13 +1454,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1536,6 +1502,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -1547,7 +1514,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1555,13 +1522,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.fields is not None and "fields" not in r: r["fields"] = save( - self.fields, 
top=False, base_url=base_url, relative_uris=relative_uris + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1570,12 +1542,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1584,6 +1561,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1660,7 +1638,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -1686,7 +1664,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -1701,7 +1679,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1742,13 +1720,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info 
= copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -1788,6 +1768,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -1799,7 +1780,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1807,12 +1788,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1821,13 +1803,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1836,6 +1819,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1843,8 +1827,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1853,6 +1838,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -1917,7 +1903,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 
'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -1932,7 +1918,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -1971,13 +1957,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -2017,6 +2005,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -2028,7 +2017,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2036,12 +2025,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) r["items"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2050,12 +2040,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -2064,6 +2059,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -2250,7 +2246,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `location` field is not valid because:", + "the 'location' field is not valid because:", SourceLine(_doc, "location", str), [e], ) @@ -2268,7 +2264,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `path` field is not valid because:", + "the 'path' field is not valid because:", SourceLine(_doc, "path", str), [e], ) @@ -2286,7 +2282,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `basename` field is not valid because:", + "the 'basename' field is not valid because:", SourceLine(_doc, "basename", str), [e], ) @@ -2304,7 +2300,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dirname` field is not valid because:", + "the 'dirname' field is not valid because:", SourceLine(_doc, "dirname", str), [e], ) @@ -2322,7 +2318,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `nameroot` field is not valid because:", + "the 'nameroot' field is not valid because:", SourceLine(_doc, "nameroot", str), [e], ) @@ -2340,7 +2336,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `nameext` field is not valid because:", + "the 'nameext' field is not valid because:", SourceLine(_doc, "nameext", str), [e], ) @@ -2358,7 +2354,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `checksum` field is not valid because:", + "the 'checksum' field is not valid because:", SourceLine(_doc, "checksum", str), [e], ) @@ -2376,7 +2372,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the 
`size` field is not valid because:", + "the 'size' field is not valid because:", SourceLine(_doc, "size", str), [e], ) @@ -2394,7 +2390,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -2412,7 +2408,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -2430,7 +2426,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `contents` field is not valid because:", + "the 'contents' field is not valid because:", SourceLine(_doc, "contents", str), [e], ) @@ -2480,13 +2476,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -2528,6 +2526,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -2539,7 +2538,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2547,12 +2546,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) r["location"] = u - max_len = 
add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2561,11 +2561,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) r["path"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2574,12 +2575,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.basename is not None and "basename" not in r: r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2588,12 +2594,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dirname is not None and "dirname" not in r: r["dirname"] = save( - self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + self.dirname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2602,12 +2613,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.nameroot is not None and "nameroot" not in r: r["nameroot"] = save( - self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + self.nameroot, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2616,12 
+2632,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.nameext is not None and "nameext" not in r: r["nameext"] = save( - self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + self.nameext, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2630,12 +2651,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.checksum is not None and "checksum" not in r: r["checksum"] = save( - self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + self.checksum, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2644,12 +2670,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.size is not None and "size" not in r: r["size"] = save( - self.size, top=False, base_url=base_url, relative_uris=relative_uris + self.size, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2658,6 +2689,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -2665,8 +2697,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2675,11 +2708,12 @@ def save( cols=cols, 
min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, base_url, True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2688,12 +2722,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.contents is not None and "contents" not in r: r["contents"] = save( - self.contents, top=False, base_url=base_url, relative_uris=relative_uris + self.contents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2702,6 +2741,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -2846,7 +2886,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `location` field is not valid because:", + "the 'location' field is not valid because:", SourceLine(_doc, "location", str), [e], ) @@ -2864,7 +2904,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `path` field is not valid because:", + "the 'path' field is not valid because:", SourceLine(_doc, "path", str), [e], ) @@ -2882,7 +2922,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `basename` field is not valid because:", + "the 'basename' field is not valid because:", SourceLine(_doc, "basename", str), [e], ) @@ -2900,7 +2940,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `listing` field is not valid because:", + "the 'listing' field is not valid because:", SourceLine(_doc, "listing", str), [e], ) @@ -2943,13 +2983,15 @@ def save( top: bool 
= False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -2991,6 +3033,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -3002,7 +3045,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3010,12 +3053,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) r["location"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3024,11 +3068,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) r["path"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3037,12 +3082,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.basename is not None and "basename" not in r: r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info 
= add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3051,12 +3101,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.listing is not None and "listing" not in r: r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3065,6 +3120,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3161,7 +3217,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -3201,13 +3257,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -3247,6 +3305,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -3258,7 +3317,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3266,7 +3325,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadContents is not None and 
"loadContents" not in r: r["loadContents"] = save( @@ -3274,8 +3334,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3284,6 +3345,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3397,7 +3459,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -3424,7 +3486,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -3441,7 +3503,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -3457,7 +3519,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -3475,7 +3537,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -3493,7 +3555,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -3511,7 +3573,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -3529,7 +3591,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -3547,7 +3609,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -3596,13 +3658,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -3642,6 +3706,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -3653,7 +3718,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3661,12 +3726,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3675,6 +3741,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) 
if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -3682,8 +3749,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3692,6 +3760,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3699,8 +3768,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3709,6 +3779,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3716,8 +3787,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3726,6 +3798,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -3733,8 +3806,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3743,6 +3817,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -3750,8 +3825,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - 
max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3760,13 +3836,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri( self.format, str(self.name), True, None, relative_uris ) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3775,6 +3852,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3782,8 +3860,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3792,6 +3871,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -3799,8 +3879,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3809,6 +3890,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -3898,7 +3980,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -3925,7 +4007,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", 
SourceLine(_doc, "fields", str), [e], ) @@ -3942,7 +4024,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -3958,7 +4040,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -3976,7 +4058,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -4021,13 +4103,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -4067,6 +4151,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -4078,7 +4163,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4086,12 +4171,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4100,6 +4186,7 @@ def save( cols=cols, 
min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -4107,8 +4194,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4117,6 +4205,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4124,8 +4213,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4134,6 +4224,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4141,8 +4232,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4151,6 +4243,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4158,8 +4251,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4168,6 +4262,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4245,7 +4340,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid 
because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -4271,7 +4366,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -4286,7 +4381,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4302,7 +4397,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -4320,7 +4415,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -4365,13 +4460,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -4411,6 +4508,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -4422,7 +4520,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4430,12 +4528,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + 
inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4444,13 +4543,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4459,6 +4559,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4466,8 +4567,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4476,6 +4578,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4483,8 +4586,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4493,6 +4597,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4500,8 +4605,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4510,6 +4616,7 @@ def 
save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4587,7 +4694,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -4613,7 +4720,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -4628,7 +4735,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -4644,7 +4751,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -4662,7 +4769,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -4707,13 +4814,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -4753,6 +4862,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -4764,7 +4874,7 @@ def save( r[key] = 
saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4772,12 +4882,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4786,11 +4897,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) r["items"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4799,6 +4911,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4806,8 +4919,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4816,6 +4930,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4823,8 +4938,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4833,6 +4949,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4840,8 +4957,9 @@ def save( top=False, 
base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4850,6 +4968,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -4943,7 +5062,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -4970,7 +5089,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -4987,7 +5106,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -5003,7 +5122,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -5021,7 +5140,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -5039,7 +5158,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -5057,7 +5176,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid 
because:", SourceLine(_doc, "format", str), [e], ) @@ -5104,13 +5223,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -5150,6 +5271,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -5161,7 +5283,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5169,12 +5291,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5183,6 +5306,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5190,8 +5314,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5200,6 +5325,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5207,8 +5333,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5217,6 +5344,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5224,8 +5352,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5234,6 +5363,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -5241,8 +5371,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5251,6 +5382,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -5258,8 +5390,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5268,13 +5401,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri( self.format, str(self.name), True, None, relative_uris ) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5283,6 +5417,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -5362,7 +5497,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -5389,7 +5524,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -5406,7 +5541,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -5422,7 +5557,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -5440,7 +5575,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -5485,13 +5620,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -5531,6 +5668,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -5542,7 +5680,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, 
inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5550,12 +5688,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5564,6 +5703,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -5571,8 +5711,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5581,6 +5722,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5588,8 +5730,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5598,6 +5741,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5605,8 +5749,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5615,6 +5760,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5622,8 +5768,9 @@ def 
save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5632,6 +5779,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -5709,7 +5857,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -5735,7 +5883,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -5750,7 +5898,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -5766,7 +5914,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -5784,7 +5932,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -5829,13 +5977,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ 
-5875,6 +6025,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -5886,7 +6037,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5894,12 +6045,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5908,13 +6060,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5923,6 +6076,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5930,8 +6084,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5940,6 +6095,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5947,8 +6103,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -5957,6 +6114,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5964,8 +6122,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -5974,6 +6133,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6051,7 +6211,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -6077,7 +6237,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -6092,7 +6252,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -6108,7 +6268,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -6126,7 +6286,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -6171,13 +6331,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, 
int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6217,6 +6379,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -6228,7 +6391,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6236,12 +6399,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6250,11 +6414,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) r["items"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6263,6 +6428,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -6270,8 +6436,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6280,6 +6447,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = 
save( @@ -6287,8 +6455,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6297,6 +6466,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -6304,8 +6474,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6314,6 +6485,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6426,7 +6598,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", + "the 'expressionLib' field is not valid because:", SourceLine(_doc, "expressionLib", str), [e], ) @@ -6468,13 +6640,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6516,6 +6690,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -6527,7 +6702,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6535,7 +6710,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - 
max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6543,8 +6719,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6553,6 +6730,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6640,7 +6818,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `types` field is not valid because:", + "the 'types' field is not valid because:", SourceLine(_doc, "types", str), [e], ) @@ -6678,13 +6856,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6726,6 +6906,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -6737,7 +6918,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6745,13 +6926,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.types is not None and "types" not in r: r["types"] = save( - self.types, top=False, base_url=base_url, relative_uris=relative_uris + self.types, + top=False, + base_url=base_url, + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6760,6 +6946,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -6843,7 +7030,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `pattern` field is not valid because:", + "the 'pattern' field is not valid because:", SourceLine(_doc, "pattern", str), [e], ) @@ -6859,7 +7046,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `required` field is not valid because:", + "the 'required' field is not valid because:", SourceLine(_doc, "required", str), [e], ) @@ -6900,13 +7087,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -6946,6 +7135,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -6957,7 +7147,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6965,13 +7155,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.pattern is not None and "pattern" not in r: r["pattern"] = save( - self.pattern, top=False, base_url=base_url, relative_uris=relative_uris + self.pattern, + top=False, + base_url=base_url, + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6980,12 +7175,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.required is not None and "required" not in r: r["required"] = save( - self.required, top=False, base_url=base_url, relative_uris=relative_uris + self.required, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -6994,6 +7194,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7070,7 +7271,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -7112,13 +7313,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7160,6 +7363,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -7171,7 +7375,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7179,7 +7383,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + 
max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -7187,8 +7392,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7197,6 +7403,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7270,7 +7477,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envName` field is not valid because:", + "the 'envName' field is not valid because:", SourceLine(_doc, "envName", str), [e], ) @@ -7285,7 +7492,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envValue` field is not valid because:", + "the 'envValue' field is not valid because:", SourceLine(_doc, "envValue", str), [e], ) @@ -7324,13 +7531,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7370,6 +7579,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -7381,7 +7591,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7389,13 +7599,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + 
inserted_line_info=inserted_line_info ) if self.envName is not None and "envName" not in r: r["envName"] = save( - self.envName, top=False, base_url=base_url, relative_uris=relative_uris + self.envName, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7404,12 +7619,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.envValue is not None and "envValue" not in r: r["envValue"] = save( - self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + self.envValue, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7418,6 +7638,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7550,7 +7771,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -7568,7 +7789,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `position` field is not valid because:", + "the 'position' field is not valid because:", SourceLine(_doc, "position", str), [e], ) @@ -7586,7 +7807,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `prefix` field is not valid because:", + "the 'prefix' field is not valid because:", SourceLine(_doc, "prefix", str), [e], ) @@ -7604,7 +7825,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `separate` field is not valid because:", + "the 
'separate' field is not valid because:", SourceLine(_doc, "separate", str), [e], ) @@ -7622,7 +7843,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `itemSeparator` field is not valid because:", + "the 'itemSeparator' field is not valid because:", SourceLine(_doc, "itemSeparator", str), [e], ) @@ -7640,7 +7861,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", + "the 'valueFrom' field is not valid because:", SourceLine(_doc, "valueFrom", str), [e], ) @@ -7658,7 +7879,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `shellQuote` field is not valid because:", + "the 'shellQuote' field is not valid because:", SourceLine(_doc, "shellQuote", str), [e], ) @@ -7704,13 +7925,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -7750,6 +7973,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -7761,7 +7985,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7769,7 +7993,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -7777,8 +8002,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7787,12 +8013,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.position is not None and "position" not in r: r["position"] = save( - self.position, top=False, base_url=base_url, relative_uris=relative_uris + self.position, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7801,12 +8032,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.prefix is not None and "prefix" not in r: r["prefix"] = save( - self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + self.prefix, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7815,12 +8051,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.separate is not None and "separate" not in r: r["separate"] = save( - self.separate, top=False, base_url=base_url, relative_uris=relative_uris + self.separate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7829,6 +8070,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.itemSeparator is not None and "itemSeparator" not in r: r["itemSeparator"] = save( @@ -7836,8 +8078,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7846,6 +8089,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -7853,8 +8097,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7863,6 +8108,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.shellQuote is not None and "shellQuote" not in r: r["shellQuote"] = save( @@ -7870,8 +8116,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -7880,6 +8127,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -7978,7 +8226,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -7996,7 +8244,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -8014,7 +8262,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `glob` field is not valid because:", + "the 'glob' field is not valid because:", SourceLine(_doc, "glob", str), [e], ) @@ -8032,7 +8280,7 @@ def fromDoc( except 
ValidationException as e: _errors__.append( ValidationException( - "the `outputEval` field is not valid because:", + "the 'outputEval' field is not valid because:", SourceLine(_doc, "outputEval", str), [e], ) @@ -8075,13 +8323,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8121,6 +8371,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -8132,7 +8383,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8140,7 +8391,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8148,8 +8400,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8158,6 +8411,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -8165,8 +8419,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8175,12 +8430,17 @@ def save( 
cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.glob is not None and "glob" not in r: r["glob"] = save( - self.glob, top=False, base_url=base_url, relative_uris=relative_uris + self.glob, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8189,6 +8449,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputEval is not None and "outputEval" not in r: r["outputEval"] = save( @@ -8196,8 +8457,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8206,6 +8468,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -8269,7 +8532,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -8309,13 +8572,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8355,6 +8620,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -8366,7 +8632,7 @@ def 
save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8374,7 +8640,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8382,8 +8649,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8392,6 +8660,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -8497,7 +8766,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -8524,7 +8793,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -8541,7 +8810,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -8557,7 +8826,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -8575,7 +8844,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -8593,7 +8862,7 @@ 
def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -8611,7 +8880,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -8629,7 +8898,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -8647,7 +8916,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -8665,7 +8934,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -8717,13 +8986,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -8763,6 +9034,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -8774,7 +9046,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = 
add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8782,12 +9054,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8796,6 +9069,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -8803,8 +9077,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8813,6 +9088,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -8820,8 +9096,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8830,6 +9107,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -8837,8 +9115,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8847,6 +9126,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -8854,8 +9134,9 @@ 
def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8864,6 +9145,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -8871,8 +9153,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8881,13 +9164,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri( self.format, str(self.name), True, None, relative_uris ) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8896,6 +9180,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8903,8 +9188,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8913,6 +9199,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -8920,8 +9207,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8930,6 +9218,7 @@ 
def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8937,8 +9226,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -8947,6 +9237,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -9044,7 +9335,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -9071,7 +9362,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -9088,7 +9379,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -9104,7 +9395,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -9122,7 +9413,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -9140,7 +9431,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, 
"inputBinding", str), [e], ) @@ -9188,13 +9479,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9234,6 +9527,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -9245,7 +9539,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9253,12 +9547,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9267,6 +9562,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -9274,8 +9570,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9284,6 +9581,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9291,8 +9589,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9301,6 +9600,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9308,8 +9608,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9318,6 +9619,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9325,8 +9627,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9335,6 +9638,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9342,8 +9646,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9352,6 +9657,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -9441,7 +9747,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -9467,7 +9773,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field 
is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -9482,7 +9788,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -9498,7 +9804,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -9516,7 +9822,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -9534,7 +9840,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -9582,13 +9888,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -9628,6 +9936,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -9639,7 +9948,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9647,12 +9956,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, 
+ inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9661,13 +9971,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9676,6 +9987,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9683,8 +9995,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9693,6 +10006,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9700,8 +10014,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9710,6 +10025,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9717,8 +10033,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9727,6 +10044,7 @@ 
def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9734,8 +10052,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -9744,6 +10063,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -9828,7 +10148,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -9854,7 +10174,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -9869,7 +10189,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -9885,7 +10205,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -9903,7 +10223,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -9921,7 +10241,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, 
"inputBinding", str), [e], ) @@ -9969,13 +10289,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -10015,6 +10337,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -10026,7 +10349,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10034,12 +10357,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10048,11 +10372,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) r["items"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10061,6 +10386,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10068,8 +10394,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, 
inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10078,6 +10405,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10085,8 +10413,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10095,6 +10424,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10102,8 +10432,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10112,6 +10443,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -10119,8 +10451,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10129,6 +10462,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -10226,7 +10560,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -10253,7 +10587,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid 
because:", SourceLine(_doc, "doc", str), [e], ) @@ -10270,7 +10604,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -10286,7 +10620,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -10304,7 +10638,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -10322,7 +10656,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -10340,7 +10674,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -10358,7 +10692,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -10408,13 +10742,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -10454,6 
+10790,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -10465,7 +10802,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10473,12 +10810,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10487,6 +10825,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10494,8 +10833,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10504,6 +10844,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10511,8 +10852,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10521,6 +10863,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10528,8 +10871,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + 
max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10538,6 +10882,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -10545,8 +10890,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10555,6 +10901,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -10562,8 +10909,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10572,13 +10920,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri( self.format, str(self.name), True, None, relative_uris ) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10587,6 +10936,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -10594,8 +10944,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10604,6 +10955,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to 
the directory level @@ -10692,7 +11044,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -10719,7 +11071,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `fields` field is not valid because:", + "the 'fields' field is not valid because:", SourceLine(_doc, "fields", str), [e], ) @@ -10736,7 +11088,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -10752,7 +11104,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -10770,7 +11122,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -10817,13 +11169,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -10863,6 +11217,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -10874,7 +11229,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -10882,12 +11237,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10896,6 +11252,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -10903,8 +11260,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10913,6 +11271,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10920,8 +11279,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10930,6 +11290,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10937,8 +11298,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10947,6 +11309,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10954,8 +11317,9 @@ def save( top=False, 
base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -10964,6 +11328,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -11041,7 +11406,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -11067,7 +11432,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", + "the 'symbols' field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) @@ -11082,7 +11447,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -11098,7 +11463,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -11116,7 +11481,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -11163,13 +11528,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ 
-11209,6 +11576,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -11220,7 +11588,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11228,12 +11596,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11242,13 +11611,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( self.symbols, str(self.name), True, None, relative_uris ) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11257,6 +11627,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11264,8 +11635,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11274,6 +11646,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11281,8 +11654,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( 
old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11291,6 +11665,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11298,8 +11673,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11308,6 +11684,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -11385,7 +11762,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `name` field is not valid because:", + "the 'name' field is not valid because:", SourceLine(_doc, "name", str), [e], ) @@ -11411,7 +11788,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `items` field is not valid because:", + "the 'items' field is not valid because:", SourceLine(_doc, "items", str), [e], ) @@ -11426,7 +11803,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -11442,7 +11819,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -11460,7 +11837,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -11507,13 +11884,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + 
inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -11553,6 +11932,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -11564,7 +11944,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11572,12 +11952,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11586,11 +11967,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) r["items"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11599,6 +11981,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11606,8 +11989,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11616,6 +12000,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is 
not None and "label" not in r: r["label"] = save( @@ -11623,8 +12008,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11633,6 +12019,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11640,8 +12027,9 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -11650,6 +12038,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -11763,7 +12152,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -11790,7 +12179,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -11808,7 +12197,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -11826,7 +12215,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -11844,7 +12233,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the 
`doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -11862,7 +12251,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -11880,7 +12269,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -11898,7 +12287,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -11916,7 +12305,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -11933,7 +12322,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -11949,7 +12338,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -12000,13 +12389,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) 
for key in keys: if isinstance(doc, CommentedMap): @@ -12065,6 +12456,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -12076,7 +12468,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12084,12 +12476,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12098,6 +12491,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -12105,8 +12499,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12115,6 +12510,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -12122,8 +12518,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12132,6 +12529,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -12139,8 +12537,9 @@ def save( top=False, 
base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12149,12 +12548,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12163,11 +12567,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12176,6 +12581,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -12183,8 +12589,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12193,6 +12600,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -12200,8 +12608,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ 
-12210,6 +12619,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -12217,8 +12627,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12227,12 +12638,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12241,6 +12657,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -12248,8 +12665,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12258,6 +12676,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -12373,7 +12792,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -12400,7 +12819,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", 
SourceLine(_doc, "label", str), [e], ) @@ -12418,7 +12837,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -12436,7 +12855,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -12454,7 +12873,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -12472,7 +12891,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -12489,7 +12908,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -12505,7 +12924,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", + "the 'outputBinding' field is not valid because:", SourceLine(_doc, "outputBinding", str), [e], ) @@ -12555,13 +12974,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -12620,6 +13041,7 @@ def save( 
base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -12631,7 +13053,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12639,12 +13061,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12653,6 +13076,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -12660,8 +13084,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12670,6 +13095,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -12677,8 +13103,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12687,6 +13114,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -12694,8 +13122,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, 
) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12704,12 +13133,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12718,11 +13152,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12731,12 +13166,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12745,6 +13185,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -12752,8 +13193,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -12762,6 +13204,7 @@ def save( cols=cols, 
min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -12918,7 +13361,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -12945,7 +13388,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -12963,7 +13406,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -12980,7 +13423,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -12995,7 +13438,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -13011,7 +13454,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -13029,7 +13472,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -13047,7 +13490,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid 
because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -13065,7 +13508,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `intent` field is not valid because:", + "the 'intent' field is not valid because:", SourceLine(_doc, "intent", str), [e], ) @@ -13083,7 +13526,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `baseCommand` field is not valid because:", + "the 'baseCommand' field is not valid because:", SourceLine(_doc, "baseCommand", str), [e], ) @@ -13101,7 +13544,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `arguments` field is not valid because:", + "the 'arguments' field is not valid because:", SourceLine(_doc, "arguments", str), [e], ) @@ -13119,7 +13562,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stdin` field is not valid because:", + "the 'stdin' field is not valid because:", SourceLine(_doc, "stdin", str), [e], ) @@ -13137,7 +13580,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stderr` field is not valid because:", + "the 'stderr' field is not valid because:", SourceLine(_doc, "stderr", str), [e], ) @@ -13155,7 +13598,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `stdout` field is not valid because:", + "the 'stdout' field is not valid because:", SourceLine(_doc, "stdout", str), [e], ) @@ -13173,7 +13616,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `successCodes` field is not valid because:", + "the 'successCodes' field is not valid because:", SourceLine(_doc, "successCodes", str), [e], ) @@ -13191,7 +13634,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `temporaryFailCodes` field is not valid because:", + "the 'temporaryFailCodes' field is not valid 
because:", SourceLine(_doc, "temporaryFailCodes", str), [e], ) @@ -13209,7 +13652,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `permanentFailCodes` field is not valid because:", + "the 'permanentFailCodes' field is not valid because:", SourceLine(_doc, "permanentFailCodes", str), [e], ) @@ -13266,13 +13709,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -13333,6 +13778,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -13344,7 +13790,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13352,12 +13798,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13366,6 +13813,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -13373,8 +13821,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -13383,12 +13832,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13397,6 +13851,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -13404,8 +13859,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13414,6 +13870,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -13421,8 +13878,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13431,6 +13889,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -13438,8 +13897,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13448,6 +13908,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if 
self.hints is not None and "hints" not in r: r["hints"] = save( @@ -13455,8 +13916,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13465,13 +13927,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13480,11 +13943,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.intent is not None and "intent" not in r: u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) r["intent"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13493,6 +13957,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.baseCommand is not None and "baseCommand" not in r: r["baseCommand"] = save( @@ -13500,8 +13965,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13510,6 +13976,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.arguments is not None and "arguments" not in r: r["arguments"] = save( @@ -13517,8 +13984,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, 
@@ -13527,6 +13995,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stdin is not None and "stdin" not in r: r["stdin"] = save( @@ -13534,8 +14003,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13544,6 +14014,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stderr is not None and "stderr" not in r: r["stderr"] = save( @@ -13551,8 +14022,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13561,6 +14033,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.stdout is not None and "stdout" not in r: r["stdout"] = save( @@ -13568,8 +14041,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13578,6 +14052,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.successCodes is not None and "successCodes" not in r: r["successCodes"] = save( @@ -13585,8 +14060,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13595,6 +14071,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: 
r["temporaryFailCodes"] = save( @@ -13602,8 +14079,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13612,6 +14090,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.permanentFailCodes is not None and "permanentFailCodes" not in r: r["permanentFailCodes"] = save( @@ -13619,8 +14098,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -13629,6 +14109,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -13801,7 +14282,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerPull` field is not valid because:", + "the 'dockerPull' field is not valid because:", SourceLine(_doc, "dockerPull", str), [e], ) @@ -13819,7 +14300,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerLoad` field is not valid because:", + "the 'dockerLoad' field is not valid because:", SourceLine(_doc, "dockerLoad", str), [e], ) @@ -13837,7 +14318,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerFile` field is not valid because:", + "the 'dockerFile' field is not valid because:", SourceLine(_doc, "dockerFile", str), [e], ) @@ -13855,7 +14336,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerImport` field is not valid because:", + "the 'dockerImport' field is not valid because:", SourceLine(_doc, "dockerImport", str), [e], ) @@ -13873,7 +14354,7 @@ def fromDoc( except 
ValidationException as e: _errors__.append( ValidationException( - "the `dockerImageId` field is not valid because:", + "the 'dockerImageId' field is not valid because:", SourceLine(_doc, "dockerImageId", str), [e], ) @@ -13891,7 +14372,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `dockerOutputDirectory` field is not valid because:", + "the 'dockerOutputDirectory' field is not valid because:", SourceLine(_doc, "dockerOutputDirectory", str), [e], ) @@ -13936,13 +14417,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -13984,6 +14467,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -13995,7 +14479,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14003,7 +14487,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -14011,8 +14496,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14021,6 +14507,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerLoad is not None and "dockerLoad" not in r: 
r["dockerLoad"] = save( @@ -14028,8 +14515,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14038,6 +14526,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerFile is not None and "dockerFile" not in r: r["dockerFile"] = save( @@ -14045,8 +14534,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14055,6 +14545,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerImport is not None and "dockerImport" not in r: r["dockerImport"] = save( @@ -14062,8 +14553,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14072,6 +14564,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerImageId is not None and "dockerImageId" not in r: r["dockerImageId"] = save( @@ -14079,8 +14572,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14089,6 +14583,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: r["dockerOutputDirectory"] = save( @@ -14096,8 +14591,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14106,6 +14602,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14189,7 +14686,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `packages` field is not valid because:", + "the 'packages' field is not valid because:", SourceLine(_doc, "packages", str), [e], ) @@ -14227,13 +14724,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14275,6 +14774,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -14286,7 +14786,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14294,13 +14794,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.packages is not None and "packages" not in r: r["packages"] = save( - self.packages, top=False, base_url=base_url, relative_uris=relative_uris + self.packages, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14309,6 +14814,7 @@ def save( cols=cols, min_col=min_col, 
max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14379,7 +14885,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `package` field is not valid because:", + "the 'package' field is not valid because:", SourceLine(_doc, "package", str), [e], ) @@ -14395,7 +14901,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `version` field is not valid because:", + "the 'version' field is not valid because:", SourceLine(_doc, "version", str), [e], ) @@ -14413,7 +14919,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `specs` field is not valid because:", + "the 'specs' field is not valid because:", SourceLine(_doc, "specs", str), [e], ) @@ -14455,13 +14961,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14501,6 +15009,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -14512,7 +15021,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14520,13 +15029,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.package is not None and "package" not in r: r["package"] = save( - self.package, top=False, base_url=base_url, relative_uris=relative_uris + self.package, + top=False, + 
base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14535,12 +15049,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.version is not None and "version" not in r: r["version"] = save( - self.version, top=False, base_url=base_url, relative_uris=relative_uris + self.version, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14549,11 +15068,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.specs is not None and "specs" not in r: u = save_relative_uri(self.specs, base_url, False, None, relative_uris) r["specs"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14562,6 +15082,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14645,7 +15166,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `entryname` field is not valid because:", + "the 'entryname' field is not valid because:", SourceLine(_doc, "entryname", str), [e], ) @@ -14662,7 +15183,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `entry` field is not valid because:", + "the 'entry' field is not valid because:", SourceLine(_doc, "entry", str), [e], ) @@ -14678,7 +15199,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `writable` field is not valid because:", + "the 'writable' field is not valid because:", SourceLine(_doc, "writable", str), [e], ) @@ -14720,13 +15241,15 @@ 
def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14766,6 +15289,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -14777,7 +15301,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14785,7 +15309,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -14793,8 +15318,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14803,12 +15329,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.entry is not None and "entry" not in r: r["entry"] = save( - self.entry, top=False, base_url=base_url, relative_uris=relative_uris + self.entry, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14817,12 +15348,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.writable is not None and "writable" not in r: r["writable"] = save( - self.writable, 
top=False, base_url=base_url, relative_uris=relative_uris + self.writable, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -14831,6 +15367,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -14903,7 +15440,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `listing` field is not valid because:", + "the 'listing' field is not valid because:", SourceLine(_doc, "listing", str), [e], ) @@ -14943,13 +15480,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -14991,6 +15530,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -15002,7 +15542,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15010,13 +15550,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.listing is not None and "listing" not in r: r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info 
= add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15025,6 +15570,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -15098,7 +15644,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `envDef` field is not valid because:", + "the 'envDef' field is not valid because:", SourceLine(_doc, "envDef", str), [e], ) @@ -15136,13 +15682,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15184,6 +15732,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -15195,7 +15744,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15203,13 +15752,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( - self.envDef, top=False, base_url=base_url, relative_uris=relative_uris + self.envDef, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15218,6 +15772,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -15316,13 +15871,15 
@@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15364,6 +15921,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -15375,7 +15933,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15383,7 +15941,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -15517,7 +16076,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `coresMin` field is not valid because:", + "the 'coresMin' field is not valid because:", SourceLine(_doc, "coresMin", str), [e], ) @@ -15535,7 +16094,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `coresMax` field is not valid because:", + "the 'coresMax' field is not valid because:", SourceLine(_doc, "coresMax", str), [e], ) @@ -15553,7 +16112,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `ramMin` field is not valid because:", + "the 'ramMin' field is not valid because:", SourceLine(_doc, "ramMin", str), [e], ) @@ -15571,7 +16130,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `ramMax` field is not valid because:", + "the 'ramMax' field is not valid because:", SourceLine(_doc, "ramMax", str), [e], ) @@ -15589,7 +16148,7 @@ def 
fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `tmpdirMin` field is not valid because:", + "the 'tmpdirMin' field is not valid because:", SourceLine(_doc, "tmpdirMin", str), [e], ) @@ -15607,7 +16166,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `tmpdirMax` field is not valid because:", + "the 'tmpdirMax' field is not valid because:", SourceLine(_doc, "tmpdirMax", str), [e], ) @@ -15625,7 +16184,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outdirMin` field is not valid because:", + "the 'outdirMin' field is not valid because:", SourceLine(_doc, "outdirMin", str), [e], ) @@ -15643,7 +16202,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outdirMax` field is not valid because:", + "the 'outdirMax' field is not valid because:", SourceLine(_doc, "outdirMax", str), [e], ) @@ -15690,13 +16249,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -15738,6 +16299,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -15749,7 +16311,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15757,13 +16319,18 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.coresMin is not None and "coresMin" 
not in r: r["coresMin"] = save( - self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + self.coresMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15772,12 +16339,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.coresMax is not None and "coresMax" not in r: r["coresMax"] = save( - self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris + self.coresMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15786,12 +16358,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.ramMin is not None and "ramMin" not in r: r["ramMin"] = save( - self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + self.ramMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15800,12 +16377,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.ramMax is not None and "ramMax" not in r: r["ramMax"] = save( - self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + self.ramMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15814,6 +16396,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.tmpdirMin is 
not None and "tmpdirMin" not in r: r["tmpdirMin"] = save( @@ -15821,8 +16404,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15831,6 +16415,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.tmpdirMax is not None and "tmpdirMax" not in r: r["tmpdirMax"] = save( @@ -15838,8 +16423,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15848,6 +16434,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outdirMin is not None and "outdirMin" not in r: r["outdirMin"] = save( @@ -15855,8 +16442,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15865,6 +16453,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outdirMax is not None and "outdirMax" not in r: r["outdirMax"] = save( @@ -15872,8 +16461,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -15882,6 +16472,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -15976,7 +16567,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `enableReuse` field is not valid because:", + "the 'enableReuse' field is 
not valid because:", SourceLine(_doc, "enableReuse", str), [e], ) @@ -16014,13 +16605,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16062,6 +16655,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -16073,7 +16667,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16081,7 +16675,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.enableReuse is not None and "enableReuse" not in r: r["enableReuse"] = save( @@ -16089,8 +16684,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16099,6 +16695,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16188,7 +16785,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `networkAccess` field is not valid because:", + "the 'networkAccess' field is not valid because:", SourceLine(_doc, "networkAccess", str), [e], ) @@ -16226,13 +16823,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: 
Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16274,6 +16873,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -16285,7 +16885,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16293,7 +16893,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.networkAccess is not None and "networkAccess" not in r: r["networkAccess"] = save( @@ -16301,8 +16902,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16311,6 +16913,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16415,7 +17018,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inplaceUpdate` field is not valid because:", + "the 'inplaceUpdate' field is not valid because:", SourceLine(_doc, "inplaceUpdate", str), [e], ) @@ -16455,13 +17058,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16503,6 
+17108,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -16514,7 +17120,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16522,7 +17128,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.inplaceUpdate is not None and "inplaceUpdate" not in r: r["inplaceUpdate"] = save( @@ -16530,8 +17137,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16540,6 +17148,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16620,7 +17229,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `timelimit` field is not valid because:", + "the 'timelimit' field is not valid because:", SourceLine(_doc, "timelimit", str), [e], ) @@ -16658,13 +17267,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -16706,6 +17317,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -16717,7 +17329,7 @@ def save( r[key] = saved_val - max_len = 
add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16725,7 +17337,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.timelimit is not None and "timelimit" not in r: r["timelimit"] = save( @@ -16733,8 +17346,9 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -16743,6 +17357,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -16836,7 +17451,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -16863,7 +17478,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -16881,7 +17496,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -16899,7 +17514,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -16917,7 +17532,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -16935,7 +17550,7 @@ def fromDoc( 
except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -16952,7 +17567,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -16999,13 +17614,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -17064,6 +17681,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -17075,7 +17693,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17083,12 +17701,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17097,6 +17716,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -17104,8 +17724,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - 
max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17114,6 +17735,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -17121,8 +17743,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17131,6 +17754,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -17138,8 +17762,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17148,12 +17773,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17162,11 +17792,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17175,12 +17806,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17189,6 +17825,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -17300,7 +17937,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -17327,7 +17964,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -17345,7 +17982,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -17363,7 +18000,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -17381,7 +18018,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -17399,7 +18036,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", 
SourceLine(_doc, "format", str), [e], ) @@ -17417,7 +18054,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -17435,7 +18072,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -17453,7 +18090,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -17470,7 +18107,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -17486,7 +18123,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", + "the 'inputBinding' field is not valid because:", SourceLine(_doc, "inputBinding", str), [e], ) @@ -17539,13 +18176,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -17604,6 +18243,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -17615,7 +18255,7 @@ def save( 
r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17623,12 +18263,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17637,6 +18278,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -17644,8 +18286,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17654,6 +18297,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -17661,8 +18305,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17671,6 +18316,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -17678,8 +18324,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17688,12 +18335,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + 
inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17702,11 +18354,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17715,6 +18368,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -17722,8 +18376,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17732,6 +18387,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -17739,8 +18395,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17749,6 +18406,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -17756,8 +18414,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, 
+ inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17766,12 +18425,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17780,6 +18444,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -17787,8 +18452,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -17797,6 +18463,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -17934,7 +18601,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -17961,7 +18628,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -17979,7 +18646,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -17996,7 +18663,7 @@ def 
fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -18011,7 +18678,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -18027,7 +18694,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -18045,7 +18712,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -18063,7 +18730,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -18081,7 +18748,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `intent` field is not valid because:", + "the 'intent' field is not valid because:", SourceLine(_doc, "intent", str), [e], ) @@ -18098,7 +18765,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `expression` field is not valid because:", + "the 'expression' field is not valid because:", SourceLine(_doc, "expression", str), [e], ) @@ -18146,13 +18813,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = 
copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -18213,6 +18882,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -18224,7 +18894,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18232,12 +18902,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18246,6 +18917,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18253,8 +18925,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18263,12 +18936,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18277,6 +18955,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if 
self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -18284,8 +18963,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18294,6 +18974,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -18301,8 +18982,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18311,6 +18993,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -18318,8 +19001,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18328,6 +19012,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -18335,8 +19020,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18345,13 +19031,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info 
= add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18360,11 +19047,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.intent is not None and "intent" not in r: u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) r["intent"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18373,6 +19061,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.expression is not None and "expression" not in r: r["expression"] = save( @@ -18380,8 +19069,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18390,6 +19080,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -18520,7 +19211,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -18547,7 +19238,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -18565,7 +19256,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -18583,7 +19274,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", 
SourceLine(_doc, "streamable", str), [e], ) @@ -18601,7 +19292,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -18619,7 +19310,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -18637,7 +19328,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputSource` field is not valid because:", + "the 'outputSource' field is not valid because:", SourceLine(_doc, "outputSource", str), [e], ) @@ -18655,7 +19346,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", + "the 'linkMerge' field is not valid because:", SourceLine(_doc, "linkMerge", str), [e], ) @@ -18673,7 +19364,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `pickValue` field is not valid because:", + "the 'pickValue' field is not valid because:", SourceLine(_doc, "pickValue", str), [e], ) @@ -18690,7 +19381,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -18740,13 +19431,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -18805,6 +19498,7 @@ def save( 
base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -18816,7 +19510,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18824,12 +19518,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18838,6 +19533,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18845,8 +19541,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18855,6 +19552,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -18862,8 +19560,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18872,6 +19571,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -18879,8 +19579,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, 
) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18889,12 +19590,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18903,11 +19609,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18916,13 +19623,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputSource is not None and "outputSource" not in r: u = save_relative_uri( self.outputSource, str(self.id), False, 1, relative_uris ) r["outputSource"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18931,6 +19639,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -18938,8 +19647,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18948,6 +19658,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.pickValue is not None and 
"pickValue" not in r: r["pickValue"] = save( @@ -18955,8 +19666,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18965,12 +19677,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -18979,6 +19696,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -19209,7 +19927,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -19236,7 +19954,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `source` field is not valid because:", + "the 'source' field is not valid because:", SourceLine(_doc, "source", str), [e], ) @@ -19254,7 +19972,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", + "the 'linkMerge' field is not valid because:", SourceLine(_doc, "linkMerge", str), [e], ) @@ -19272,7 +19990,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `pickValue` field is not valid because:", + "the 'pickValue' field is not valid because:", SourceLine(_doc, "pickValue", str), [e], ) @@ -19290,7 +20008,7 @@ def fromDoc( except ValidationException as e: 
_errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -19308,7 +20026,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -19326,7 +20044,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -19344,7 +20062,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -19362,7 +20080,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", + "the 'valueFrom' field is not valid because:", SourceLine(_doc, "valueFrom", str), [e], ) @@ -19411,13 +20129,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -19476,6 +20196,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -19487,7 +20208,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -19495,12 +20216,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19509,11 +20231,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.source is not None and "source" not in r: u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) r["source"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19522,6 +20245,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -19529,8 +20253,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19539,6 +20264,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.pickValue is not None and "pickValue" not in r: r["pickValue"] = save( @@ -19546,8 +20272,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19556,6 +20283,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -19563,8 +20291,9 @@ def save( top=False, base_url=str(self.id), 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19573,6 +20302,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -19580,8 +20310,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19590,6 +20321,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -19597,8 +20329,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19607,6 +20340,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -19614,8 +20348,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19624,6 +20359,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -19631,8 +20367,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19641,6 +20378,7 
@@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -19728,7 +20466,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -19776,13 +20514,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -19841,6 +20581,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -19852,7 +20593,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19860,12 +20601,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -19874,6 +20616,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -20066,7 +20809,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -20093,7 +20836,7 @@ 
def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -20111,7 +20854,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -20128,7 +20871,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `in` field is not valid because:", + "the 'in' field is not valid because:", SourceLine(_doc, "in", str), [e], ) @@ -20143,7 +20886,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `out` field is not valid because:", + "the 'out' field is not valid because:", SourceLine(_doc, "out", str), [e], ) @@ -20159,7 +20902,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -20177,7 +20920,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -20196,7 +20939,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `run` field is not valid because:", + "the 'run' field is not valid because:", SourceLine(_doc, "run", str), [e], ) @@ -20212,7 +20955,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `when` field is not valid because:", + "the 'when' field is not valid because:", SourceLine(_doc, "when", str), [e], ) @@ -20230,7 +20973,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `scatter` 
field is not valid because:", + "the 'scatter' field is not valid because:", SourceLine(_doc, "scatter", str), [e], ) @@ -20248,7 +20991,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `scatterMethod` field is not valid because:", + "the 'scatterMethod' field is not valid because:", SourceLine(_doc, "scatterMethod", str), [e], ) @@ -20299,13 +21042,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -20364,6 +21109,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -20375,7 +21121,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20383,12 +21129,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20397,6 +21144,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -20404,8 +21152,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( 
old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20414,12 +21163,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20428,12 +21182,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.in_ is not None and "in" not in r: r["in"] = save( - self.in_, top=False, base_url=str(self.id), relative_uris=relative_uris + self.in_, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20442,11 +21201,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.out is not None and "out" not in r: u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) r["out"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20455,6 +21215,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -20462,8 +21223,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20472,6 +21234,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints 
is not None and "hints" not in r: r["hints"] = save( @@ -20479,8 +21242,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20489,11 +21253,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.run is not None and "run" not in r: u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) r["run"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20502,12 +21267,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.when is not None and "when" not in r: r["when"] = save( - self.when, top=False, base_url=str(self.id), relative_uris=relative_uris + self.when, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20516,11 +21286,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.scatter is not None and "scatter" not in r: u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) r["scatter"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20529,13 +21300,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.scatterMethod is not None and "scatterMethod" not in r: u = save_relative_uri( self.scatterMethod, str(self.id), False, None, relative_uris ) r["scatterMethod"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -20544,6 +21316,7 @@ def save( 
cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -20725,7 +21498,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -20752,7 +21525,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -20770,7 +21543,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -20787,7 +21560,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -20802,7 +21575,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -20818,7 +21591,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -20836,7 +21609,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -20854,7 +21627,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not 
valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -20872,7 +21645,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `intent` field is not valid because:", + "the 'intent' field is not valid because:", SourceLine(_doc, "intent", str), [e], ) @@ -20889,7 +21662,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `steps` field is not valid because:", + "the 'steps' field is not valid because:", SourceLine(_doc, "steps", str), [e], ) @@ -20937,13 +21710,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -21004,6 +21779,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -21015,7 +21791,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21023,12 +21799,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21037,6 +21814,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -21044,8 +21822,9 @@ def save( top=False, 
base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21054,12 +21833,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21068,6 +21852,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -21075,8 +21860,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21085,6 +21871,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -21092,8 +21879,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21102,6 +21890,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -21109,8 +21898,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( 
old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21119,6 +21909,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -21126,8 +21917,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21136,13 +21928,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21151,11 +21944,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.intent is not None and "intent" not in r: u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) r["intent"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21164,6 +21958,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.steps is not None and "steps" not in r: r["steps"] = save( @@ -21171,8 +21966,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21181,6 +21977,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -21288,13 +22085,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None 
+ keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -21336,6 +22135,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -21347,7 +22147,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21355,7 +22155,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -21449,13 +22250,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -21497,6 +22300,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -21508,7 +22312,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21516,7 +22320,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -21610,13 +22415,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - 
keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -21658,6 +22465,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -21669,7 +22477,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21677,7 +22485,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -21771,13 +22580,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -21819,6 +22630,7 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -21830,7 +22642,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -21838,7 +22650,8 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) # top refers to the directory level @@ -21949,7 +22762,7 @@ def fromDoc( except ValidationException as e: 
_errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -21976,7 +22789,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -21994,7 +22807,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -22012,7 +22825,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -22030,7 +22843,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -22048,7 +22861,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -22066,7 +22879,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", + "the 'loadContents' field is not valid because:", SourceLine(_doc, "loadContents", str), [e], ) @@ -22084,7 +22897,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", + "the 'loadListing' field is not valid because:", SourceLine(_doc, "loadListing", str), [e], ) @@ -22102,7 +22915,7 @@ def fromDoc( except ValidationException as e: _errors__.append( 
ValidationException( - "the `default` field is not valid because:", + "the 'default' field is not valid because:", SourceLine(_doc, "default", str), [e], ) @@ -22119,7 +22932,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -22169,13 +22982,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -22234,6 +23049,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -22245,7 +23061,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22253,12 +23069,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22267,6 +23084,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -22274,8 +23092,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = 
add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22284,6 +23103,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -22291,8 +23111,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22301,6 +23122,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -22308,8 +23130,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22318,12 +23141,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22332,11 +23160,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22345,6 +23174,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadContents is 
not None and "loadContents" not in r: r["loadContents"] = save( @@ -22352,8 +23182,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22362,6 +23193,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -22369,8 +23201,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22379,6 +23212,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -22386,8 +23220,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22396,12 +23231,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22410,6 +23250,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -22521,7 +23362,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` 
field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -22548,7 +23389,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -22566,7 +23407,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", + "the 'secondaryFiles' field is not valid because:", SourceLine(_doc, "secondaryFiles", str), [e], ) @@ -22584,7 +23425,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", + "the 'streamable' field is not valid because:", SourceLine(_doc, "streamable", str), [e], ) @@ -22602,7 +23443,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -22620,7 +23461,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `format` field is not valid because:", + "the 'format' field is not valid because:", SourceLine(_doc, "format", str), [e], ) @@ -22637,7 +23478,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `type` field is not valid because:", + "the 'type' field is not valid because:", SourceLine(_doc, "type", str), [e], ) @@ -22684,13 +23525,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, 
CommentedMap): @@ -22749,6 +23592,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -22760,7 +23604,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22768,12 +23612,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22782,6 +23627,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -22789,8 +23635,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22799,6 +23646,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -22806,8 +23654,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22816,6 +23665,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -22823,8 +23673,9 @@ def save( top=False, base_url=str(self.id), 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22833,12 +23684,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22847,11 +23703,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) r["format"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22860,12 +23717,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=str(self.id), relative_uris=relative_uris + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -22874,6 +23736,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -22995,7 +23858,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `id` field is not valid because:", + "the 'id' field is not valid because:", SourceLine(_doc, "id", str), [e], ) @@ -23022,7 +23885,7 @@ def fromDoc( except 
ValidationException as e: _errors__.append( ValidationException( - "the `label` field is not valid because:", + "the 'label' field is not valid because:", SourceLine(_doc, "label", str), [e], ) @@ -23040,7 +23903,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `doc` field is not valid because:", + "the 'doc' field is not valid because:", SourceLine(_doc, "doc", str), [e], ) @@ -23057,7 +23920,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", + "the 'inputs' field is not valid because:", SourceLine(_doc, "inputs", str), [e], ) @@ -23072,7 +23935,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", + "the 'outputs' field is not valid because:", SourceLine(_doc, "outputs", str), [e], ) @@ -23088,7 +23951,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", + "the 'requirements' field is not valid because:", SourceLine(_doc, "requirements", str), [e], ) @@ -23106,7 +23969,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `hints` field is not valid because:", + "the 'hints' field is not valid because:", SourceLine(_doc, "hints", str), [e], ) @@ -23124,7 +23987,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", + "the 'cwlVersion' field is not valid because:", SourceLine(_doc, "cwlVersion", str), [e], ) @@ -23142,7 +24005,7 @@ def fromDoc( except ValidationException as e: _errors__.append( ValidationException( - "the `intent` field is not valid because:", + "the 'intent' field is not valid because:", SourceLine(_doc, "intent", str), [e], ) @@ -23191,13 +24054,15 @@ def save( top: bool = False, base_url: str = "", relative_uris: bool = 
True, - keys: Optional[List[Any]] = None + keys: Optional[List[Any]] = None, + inserted_line_info: Dict[int, int] = {} ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() doc = copy.copy(doc_line_info) keys = copy.copy(keys) + inserted_line_info = copy.copy(inserted_line_info) for key in keys: if isinstance(doc, CommentedMap): @@ -23258,6 +24123,7 @@ def save( base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], + inserted_line_info=inserted_line_info ) # If the returned value is a list of size 1, just save the value in the list @@ -23269,7 +24135,7 @@ def save( r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23277,12 +24143,13 @@ def save( val=r.get(key), cols=cols, min_col=min_col, - max_len=max_len + max_len=max_len, + inserted_line_info=inserted_line_info ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23291,6 +24158,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -23298,8 +24166,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23308,12 +24177,17 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=str(self.id), relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info 
= add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23322,6 +24196,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -23329,8 +24204,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23339,6 +24215,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -23346,8 +24223,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23356,6 +24234,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -23363,8 +24242,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23373,6 +24253,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -23380,8 +24261,9 @@ def save( top=False, base_url=str(self.id), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23390,13 +24272,14 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.cwlVersion is 
not None and "cwlVersion" not in r: u = save_relative_uri( self.cwlVersion, str(self.id), False, None, relative_uris ) r["cwlVersion"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23405,11 +24288,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) if self.intent is not None and "intent" not in r: u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) r["intent"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -23418,6 +24302,7 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, ) # top refers to the directory level @@ -23731,7 +24616,24 @@ def save( ), "PrimitiveType", ) +""" +Names of salad data types (based on Avro schema declarations). + +Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for +detailed information. + +null: no value +boolean: a binary value +int: 32-bit signed integer +long: 64-bit signed integer +float: single precision (32-bit) IEEE 754 floating-point number +double: double precision (64-bit) IEEE 754 floating-point number +string: Unicode character sequence +""" AnyLoader = _EnumLoader(("Any",), "Any") +""" +The **Any** type validates for any non-null value. +""" RecordFieldLoader = _RecordLoader(RecordField) RecordSchemaLoader = _RecordLoader(RecordSchema) EnumSchemaLoader = _RecordLoader(EnumSchema) @@ -23761,6 +24663,9 @@ def save( ), "CWLVersion", ) +""" +Version symbols for published CWL document versions. +""" CWLTypeLoader = _EnumLoader( ( "null", @@ -23775,6 +24680,11 @@ def save( ), "CWLType", ) +""" +Extends primitive types with the concept of a file and directory as a builtin type. 
+File: A File object +Directory: A Directory object +""" FileLoader = _RecordLoader(File) DirectoryLoader = _RecordLoader(Directory) LoadListingEnumLoader = _EnumLoader( @@ -23785,6 +24695,14 @@ def save( ), "LoadListingEnum", ) +""" +Specify the desired behavior for loading the `listing` field of +a Directory object for use by expressions. + +no_listing: Do not load the directory listing. +shallow_listing: Only load the top level listing, do not recurse into subdirectories. +deep_listing: Load the directory listing and recursively load all subdirectories as well. +""" ExpressionLoader = _ExpressionLoader(str) InputBindingLoader = _RecordLoader(InputBinding) InputRecordFieldLoader = _RecordLoader(InputRecordField) @@ -23814,8 +24732,119 @@ def save( CommandInputParameterLoader = _RecordLoader(CommandInputParameter) CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) stdinLoader = _EnumLoader(("stdin",), "stdin") +""" +Only valid as a `type` for a `CommandLineTool` input with no +`inputBinding` set. `stdin` must not be specified at the `CommandLineTool` +level. + +The following +``` +inputs: + an_input_name: + type: stdin +``` +is equivalent to +``` +inputs: + an_input_name: + type: File + streamable: true + +stdin: $(inputs.an_input_name.path) +``` +""" stdoutLoader = _EnumLoader(("stdout",), "stdout") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stdout + +stdout: a_stdout_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + +stdout: a_stdout_file +``` + +If there is no `stdout` name provided, a random filename will be created. 
+For example, the following +``` +outputs: + an_output_name: + type: stdout +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + +stdout: random_stdout_filenameABCDEFG +``` + +If the `CommandLineTool` contains logically chained commands +(e.g. `echo a && echo b`) `stdout` must include the output of +every command. +""" stderrLoader = _EnumLoader(("stderr",), "stderr") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stderr + +stderr: a_stderr_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + +stderr: a_stderr_file +``` + +If there is no `stderr` name provided, a random filename will be created. +For example, the following +``` +outputs: + an_output_name: + type: stderr +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + +stderr: random_stderr_filenameABCDEFG +``` +""" CommandLineToolLoader = _RecordLoader(CommandLineTool) DockerRequirementLoader = _RecordLoader(DockerRequirement) SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) @@ -23839,6 +24868,9 @@ def save( ), "LinkMergeMethod", ) +""" +The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +""" PickValueMethodLoader = _EnumLoader( ( "first_non_null", @@ -23847,6 +24879,9 @@ def save( ), "PickValueMethod", ) +""" +Picking non-null values among inbound data links, described in [WorkflowStepInput](#WorkflowStepInput). 
+""" WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) @@ -23858,6 +24893,9 @@ def save( ), "ScatterMethod", ) +""" +The scatter method, as described in [workflow step scatter](#WorkflowStep). +""" WorkflowStepLoader = _RecordLoader(WorkflowStep) WorkflowLoader = _RecordLoader(Workflow) SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 3d40689dc..fe7008f7b 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -49,8 +49,6 @@ def test_secondary_files_dsl() -> None: "required": [15, 35, 15, 45], } - cwl_v1_2.inserted_line_info = {} - def test_outputs_before_inputs() -> None: """ @@ -84,7 +82,6 @@ def test_outputs_before_inputs() -> None: "outputBinding": [7, 4, 8, 6], "id": [5, 2, 5, 6], } - cwl_v1_2.inserted_line_info = {} def test_type_dsl() -> None: From bdd5c04e03ec5aeb6ded189cc0bbfc16d94b1d99 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 5 Jun 2023 15:48:01 -0600 Subject: [PATCH 36/44] Updating codegen to support shifting down of text --- schema_salad/metaschema.py | 1006 +++-- schema_salad/python_codegen.py | 141 +- schema_salad/python_codegen_support.py | 160 +- schema_salad/tests/cwl_v1_0.py | 3107 +++++++------- schema_salad/tests/cwl_v1_1.py | 3558 ++++++++--------- schema_salad/tests/cwl_v1_2.py | 2715 +++++++------ schema_salad/tests/test_line_numbers.py | 64 +- .../tests/test_secondary_files_dsl.cwl | 1 + 8 files changed, 5639 insertions(+), 5113 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 0717aeac5..7b2261ef6 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -24,6 +24,7 @@ Type, Union, cast, + no_type_check, ) from urllib.parse import quote, urldefrag, 
urlparse, urlsplit, urlunsplit from urllib.request import pathname2url @@ -47,6 +48,7 @@ doc_line_info = CommentedMap() + class LoadingOptions: idx: IdxType fileuri: Optional[str] @@ -206,6 +208,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -245,28 +249,41 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, - inserted_line_info: Dict[int, int] = {} -) -> int: + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> Tuple[int, Optional[Dict[int, int]]]: """Add key value pair into Commented Map. Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. """ + if inserted_line_info is None: + inserted_line_info = {} + if len(inserted_line_info.keys()) >= 1: max_line = max(inserted_line_info.keys()) + 1 else: max_line = 0 - if ( - key in line_numbers - ): # If the key to insert is in the original CommentedMap as a key - line_info = old_doc.lc.data[key] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) - inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] - else: - line = line_info[0] - while line in inserted_line_info.keys(): + + if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] # Get the line information for the key + if ( + line_info[0] + shift not in inserted_line_info + ): # If the line of the key + shift isn't taken, add it + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[key][0] + shift, + old_doc.lc.data[key][1], + old_doc.lc.data[key][2] + shift, + old_doc.lc.data[key][3], + ], + ) + 
inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] + else: # If the line is already taken + line = line_info[0] + shift + while line in inserted_line_info.keys(): # Find the closest free line line += 1 new_doc.lc.add_kv_line_col( key, @@ -278,64 +295,93 @@ def add_kv( ], ) inserted_line_info[line] = old_doc.lc.data[key][1] - return max_len + return max_len, inserted_line_info elif isinstance(val, (int, float, str)) and not isinstance( val, bool ): # If the value is hashable if val in line_numbers: # If the value is in the original CommentedMap - line = line_numbers[val]["line"] - if line in inserted_line_info: + line = line_numbers[val]["line"] + shift # Get the line info for the value + if line in inserted_line_info: # Get the appropriate line to place value on line = max_line - if line in cols: - col = max(line_numbers[val]["col"], cols[line]) - else: - col = line_numbers[val]["col"] + + col = line_numbers[val]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len - elif isinstance(val, str): + return max_len, inserted_line_info + elif isinstance(val, str): # Logic for DSL expansition with "?" if val + "?" 
in line_numbers: - line = line_numbers[val + "?"]["line"] + line = line_numbers[val + "?"]["line"] + shift if line in inserted_line_info: line = max_line - if line in cols: - col = max(line_numbers[val + "?"]["col"], cols[line]) - else: - col = line_numbers[val + "?"]["col"] + col = line_numbers[val + "?"]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len + return max_len, inserted_line_info elif old_doc: if val in old_doc: index = old_doc.lc.data.index(val) line_info = old_doc.lc.data[index] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) - inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ - index - ][1] + if line_info[0] + shift not in inserted_line_info: + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[index][0] + shift, + old_doc.lc.data[index][1], + old_doc.lc.data[index][2] + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ + 1 + ] else: new_doc.lc.add_kv_line_col( key, [ - max_line, + max_line + shift, old_doc.lc.data[index][1], - max_line + (max_line - old_doc.lc.data[index][2]), + max_line + (max_line - old_doc.lc.data[index][2]) + shift, old_doc.lc.data[index][3], ], ) - inserted_line_info[max_line] = old_doc.lc.data[index][1] - # If neither the key or value is in the original CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col( - key, [max_line, min_col, max_line, min_col + len(key) + 2] - ) + inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1 + return max_len + 1, inserted_line_info + + 
+@no_type_check +def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + return None + else: + return None + if isinstance(doc, CommentedSeq): + to_return = CommentedMap() + for index, key in enumerate(doc): + to_return[key] = "" + to_return.lc.add_kv_line_col( + key, + [ + doc.lc.data[index][0], + doc.lc.data[index][1], + doc.lc.data[index][0], + doc.lc.data[index][1], + ], + ) + return to_return + return doc -def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: +def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. For each key/value pair in a CommentedMap, save the line/col info into a dictionary, @@ -390,7 +436,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> save_type: """Save a val of any type. 
@@ -399,22 +446,17 @@ def save( """ if keys is None: keys = [] - doc = doc_line_info - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -432,7 +474,8 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) ) return r @@ -454,6 +497,7 @@ def save( relative_uris=relative_uris, keys=new_keys, inserted_line_info=inserted_line_info, + shift=shift, ) return newdict @@ -1093,9 +1137,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type + self.doc == other.doc and self.name == other.name and self.type == other.type ) return False @@ -1179,16 +1221,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1212,25 +1250,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: 
Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1251,26 +1282,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1279,12 +1313,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1293,6 +1328,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: 
r["doc"] = save( @@ -1300,8 +1337,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1310,6 +1349,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1317,8 +1358,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1327,6 +1370,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1418,16 +1463,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `fields`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1449,25 +1490,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = 
None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1488,26 +1522,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1516,13 +1553,19 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( - self.fields, top=False, base_url=base_url, relative_uris=relative_uris + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1531,12 +1574,19 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = 
add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1545,6 +1595,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1671,9 +1723,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -1704,25 +1754,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1743,26 +1786,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, 
inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1771,12 +1817,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1785,13 +1832,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1800,6 +1847,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1807,8 +1856,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -1817,6 +1868,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1905,16 +1958,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, 
`type`".format( - k - ), + "invalid field `{}`, expected one of: `items`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1936,25 +1985,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1975,26 +2017,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2003,12 +2048,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) r["items"] = u - max_len = add_kv( + 
max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2017,12 +2063,19 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( - self.type, top=False, base_url=base_url, relative_uris=relative_uris + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2031,6 +2084,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2337,9 +2392,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2377,25 +2430,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -2416,26 +2462,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line 
= doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2444,12 +2493,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) r["_id"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2458,12 +2508,19 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self._type is not None and "_type" not in r: r["_type"] = save( - self._type, top=False, base_url=base_url, relative_uris=relative_uris + self._type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2472,6 +2529,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self._container is not None and "_container" not in r: r["_container"] = save( @@ -2479,8 +2538,10 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( 
old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2489,12 +2550,19 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.identity is not None and "identity" not in r: r["identity"] = save( - self.identity, top=False, base_url=base_url, relative_uris=relative_uris + self.identity, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2503,6 +2571,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.noLinkCheck is not None and "noLinkCheck" not in r: r["noLinkCheck"] = save( @@ -2510,8 +2580,10 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2520,6 +2592,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.mapSubject is not None and "mapSubject" not in r: r["mapSubject"] = save( @@ -2527,8 +2601,10 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2537,6 +2613,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.mapPredicate is not None and "mapPredicate" not in r: r["mapPredicate"] = save( @@ -2544,8 +2622,10 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, 
new_doc=r, line_numbers=line_numbers, @@ -2554,12 +2634,19 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.refScope is not None and "refScope" not in r: r["refScope"] = save( - self.refScope, top=False, base_url=base_url, relative_uris=relative_uris + self.refScope, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2568,12 +2655,19 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.typeDSL is not None and "typeDSL" not in r: r["typeDSL"] = save( - self.typeDSL, top=False, base_url=base_url, relative_uris=relative_uris + self.typeDSL, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2582,6 +2676,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFilesDSL is not None and "secondaryFilesDSL" not in r: r["secondaryFilesDSL"] = save( @@ -2589,8 +2685,10 @@ def save( top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2599,12 +2697,19 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.subscope is not None and "subscope" not in r: r["subscope"] = save( - self.subscope, top=False, base_url=base_url, relative_uris=relative_uris + self.subscope, + top=False, + base_url=base_url, + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2613,6 +2718,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2718,9 +2825,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2749,25 +2854,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -2788,26 +2886,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - 
): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2816,14 +2917,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.specializeFrom is not None and "specializeFrom" not in r: - u = save_relative_uri( - self.specializeFrom, base_url, False, 1, relative_uris - ) + u = save_relative_uri(self.specializeFrom, base_url, False, 1, relative_uris) r["specializeFrom"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2832,11 +2932,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.specializeTo is not None and "specializeTo" not in r: u = save_relative_uri(self.specializeTo, base_url, False, 1, relative_uris) r["specializeTo"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -2845,6 +2947,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2917,9 +3021,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.doc, self.name, self.type, self.jsonldPredicate, self.default) - ) + return hash((self.doc, self.name, self.type, self.jsonldPredicate, self.default)) @classmethod def fromDoc( @@ -3034,9 +3136,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3069,25 +3169,18 @@ def save( base_url: str = "", relative_uris: bool = True, 
keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -3108,26 +3201,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3136,12 +3232,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3150,6 +3247,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + 
shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -3157,8 +3256,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3167,6 +3268,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3174,8 +3277,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3184,6 +3289,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.jsonldPredicate is not None and "jsonldPredicate" not in r: r["jsonldPredicate"] = save( @@ -3191,8 +3298,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3201,6 +3310,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -3208,8 +3319,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3218,6 +3331,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3568,9 +3683,7 @@ def fromDoc( for k in 
_doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3611,25 +3724,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -3650,26 +3756,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3678,12 +3787,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not 
None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3692,6 +3802,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.inVocab is not None and "inVocab" not in r: r["inVocab"] = save( @@ -3699,8 +3811,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3709,6 +3823,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -3716,8 +3832,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3726,6 +3844,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3733,8 +3853,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3743,6 +3865,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -3750,8 +3874,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = 
add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3760,13 +3886,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri( - self.docParent, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) r["docParent"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3775,13 +3901,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri( - self.docChild, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) r["docChild"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3790,13 +3916,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri( - self.docAfter, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) r["docAfter"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3805,6 +3931,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.jsonldPredicate is not None and "jsonldPredicate" not in r: r["jsonldPredicate"] = save( @@ -3812,8 +3940,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, 
inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3822,6 +3952,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.documentRoot is not None and "documentRoot" not in r: r["documentRoot"] = save( @@ -3829,8 +3961,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3839,6 +3973,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.abstract is not None and "abstract" not in r: r["abstract"] = save( @@ -3846,8 +3982,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3856,11 +3994,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.extends is not None and "extends" not in r: u = save_relative_uri(self.extends, str(self.name), False, 1, relative_uris) r["extends"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3869,6 +4009,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.specialize is not None and "specialize" not in r: r["specialize"] = save( @@ -3876,8 +4018,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -3886,6 +4030,8 @@ def save( cols=cols, min_col=min_col, 
max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4210,9 +4356,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4251,25 +4395,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4290,26 +4427,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4318,12 +4458,13 @@ def save( 
cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4332,6 +4473,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.inVocab is not None and "inVocab" not in r: r["inVocab"] = save( @@ -4339,8 +4482,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4349,13 +4494,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4364,6 +4509,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4371,8 +4518,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4381,6 +4530,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = 
save( @@ -4388,8 +4539,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4398,13 +4551,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri( - self.docParent, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) r["docParent"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4413,13 +4566,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri( - self.docChild, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) r["docChild"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4428,13 +4581,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri( - self.docAfter, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) r["docAfter"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4443,6 +4596,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.jsonldPredicate is not None and "jsonldPredicate" not in r: r["jsonldPredicate"] = save( @@ -4450,8 
+4605,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4460,6 +4617,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.documentRoot is not None and "documentRoot" not in r: r["documentRoot"] = save( @@ -4467,8 +4626,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4477,11 +4638,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.extends is not None and "extends" not in r: u = save_relative_uri(self.extends, str(self.name), False, 1, relative_uris) r["extends"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4490,6 +4653,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4728,9 +4893,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4765,25 +4928,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - for key in keys: - if 
isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4804,26 +4960,29 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4832,12 +4991,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4846,6 +5006,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.inVocab is not None and "inVocab" not in r: r["inVocab"] = save( @@ -4853,8 +5015,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = 
add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4863,6 +5027,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4870,8 +5036,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4880,13 +5048,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri( - self.docParent, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) r["docParent"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4895,13 +5063,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri( - self.docChild, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) r["docChild"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4910,13 +5078,13 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri( - self.docAfter, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) r["docAfter"] = u - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, 
line_numbers=line_numbers, @@ -4925,6 +5093,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4932,8 +5102,10 @@ def save( top=False, base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - max_len = add_kv( + max_len, inserted_line_info = add_kv( old_doc=doc, new_doc=r, line_numbers=line_numbers, @@ -4942,6 +5114,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4952,9 +5126,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"] - ) + attrs = frozenset(["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"]) _vocab = { @@ -5186,17 +5358,15 @@ def save( ) Documentation_nameLoader = _EnumLoader(("documentation",), "Documentation_name") typedsl_Documentation_nameLoader_2 = _TypeDSLLoader(Documentation_nameLoader, 2) -union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = ( - _UnionLoader( - ( - SaladRecordSchemaLoader, - SaladEnumSchemaLoader, - DocumentationLoader, - ) +union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader( + ( + SaladRecordSchemaLoader, + SaladEnumSchemaLoader, + DocumentationLoader, ) ) -array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _ArrayLoader( - union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader +array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = ( + _ArrayLoader(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader) ) 
union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader( ( diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 614f22369..1becd9122 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -275,26 +275,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -305,11 +298,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -336,26 +324,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and 
isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -403,13 +383,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -430,7 +415,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) """ ) @@ -443,13 +429,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -470,69 +461,12 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) """ ) - # self.serializer.write( - # """ - # if self.id is not None and "id" not in r: - # u = save_relative_uri(self.id, base_url, True, None, relative_uris) - # r["id"] = u - # add_kv( - # old_doc=doc, - # new_doc=r, - # line_numbers=line_numbers, - # key="id", - # val=r.get("id"), - # cols=cols, - # min_col=min_col, - # max_len=max_len - # ) - # if doc: - # if u in 
doc: - # keys.append(u) - # if isinstance(doc.get(u), (CommentedMap, CommentedSeq)): - # doc = doc.get(u) - # line_numbers = get_line_numbers(doc) - # min_col = get_min_col(line_numbers) - # """ - # ) - # else: - # self.serializer.write( - # """ - # for key in self.ordered_attrs.keys(): - # if isinstance(key, str) and key not in r: - # if getattr(self, key) is not None: - # saved_val = save( - # getattr(self, key), - # top=False, - # base_url=base_url, - # relative_uris=relative_uris, - # keys=keys + [key], - # ) - - # if type(saved_val) == list: - # if ( - # len(saved_val) == 1 - # ): # If the returned value is a list of size 1, just save the value in the list - # saved_val = saved_val[0] - # r[key] = saved_val - - # add_kv( - # old_doc=doc, - # new_doc=r, - # line_numbers=line_numbers, - # key=key, - # val=r.get(key), - # cols=cols, - # min_col=min_col, - # max_len=max_len - # ) - # """ - # ) - def end_class(self, classname: str, field_names: List[str]) -> None: """Signal that we are done with this class.""" if self.current_class_is_abstract: @@ -817,7 +751,8 @@ def declare_field( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) """.format( safename=self.safe_name(name), @@ -835,7 +770,12 @@ def declare_field( """ if self.{safename} is not None and "{fieldname}" not in r: r["{fieldname}"] = save( - self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris,inserted_line_info=inserted_line_info + self.{safename}, + top=False, + base_url={baseurl}, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -846,7 +786,8 @@ def declare_field( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) """.format( safename=self.safe_name(name), diff --git a/schema_salad/python_codegen_support.py 
b/schema_salad/python_codegen_support.py index 8c939c5c0..9233f3d17 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -21,6 +21,7 @@ Type, Union, cast, + no_type_check, ) from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit from urllib.request import pathname2url @@ -44,6 +45,7 @@ doc_line_info = CommentedMap() + class LoadingOptions: idx: IdxType fileuri: Optional[str] @@ -203,6 +205,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -242,28 +246,41 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, - inserted_line_info: Dict[int, int] = {} -) -> int: + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> Tuple[int, Optional[Dict[int, int]]]: """Add key value pair into Commented Map. Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. 
""" + if inserted_line_info is None: + inserted_line_info = {} + if len(inserted_line_info.keys()) >= 1: max_line = max(inserted_line_info.keys()) + 1 else: max_line = 0 - if ( - key in line_numbers - ): # If the key to insert is in the original CommentedMap as a key - line_info = old_doc.lc.data[key] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) - inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] - else: - line = line_info[0] - while line in inserted_line_info.keys(): + + if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] # Get the line information for the key + if ( + line_info[0] + shift not in inserted_line_info + ): # If the line of the key + shift isn't taken, add it + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[key][0] + shift, + old_doc.lc.data[key][1], + old_doc.lc.data[key][2] + shift, + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] + else: # If the line is already taken + line = line_info[0] + shift + while line in inserted_line_info.keys(): # Find the closest free line line += 1 new_doc.lc.add_kv_line_col( key, @@ -275,64 +292,93 @@ def add_kv( ], ) inserted_line_info[line] = old_doc.lc.data[key][1] - return max_len + return max_len, inserted_line_info elif isinstance(val, (int, float, str)) and not isinstance( val, bool ): # If the value is hashable if val in line_numbers: # If the value is in the original CommentedMap - line = line_numbers[val]["line"] - if line in inserted_line_info: + line = line_numbers[val]["line"] + shift # Get the line info for the value + if line in inserted_line_info: # Get the appropriate line to place value on line = max_line - if line in cols: - col = max(line_numbers[val]["col"], cols[line]) - else: - col = line_numbers[val]["col"] + + col = line_numbers[val]["col"] 
new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len - elif isinstance(val, str): + return max_len, inserted_line_info + elif isinstance(val, str): # Logic for DSL expansition with "?" if val + "?" in line_numbers: - line = line_numbers[val + "?"]["line"] + line = line_numbers[val + "?"]["line"] + shift if line in inserted_line_info: line = max_line - if line in cols: - col = max(line_numbers[val + "?"]["col"], cols[line]) - else: - col = line_numbers[val + "?"]["col"] + col = line_numbers[val + "?"]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len + return max_len, inserted_line_info elif old_doc: if val in old_doc: index = old_doc.lc.data.index(val) line_info = old_doc.lc.data[index] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) - inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ - index - ][1] + if line_info[0] + shift not in inserted_line_info: + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[index][0] + shift, + old_doc.lc.data[index][1], + old_doc.lc.data[index][2] + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ + 1 + ] else: new_doc.lc.add_kv_line_col( key, [ - max_line, + max_line + shift, old_doc.lc.data[index][1], - max_line + (max_line - old_doc.lc.data[index][2]), + max_line + (max_line - old_doc.lc.data[index][2]) + shift, old_doc.lc.data[index][3], ], ) - inserted_line_info[max_line] = old_doc.lc.data[index][1] - # If neither the key or value is in the original CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col( - key, [max_line, min_col, max_line, min_col + len(key) + 2] - ) + inserted_line_info[max_line + shift] = 
old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1 + return max_len + 1, inserted_line_info + + +@no_type_check +def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + return None + else: + return None + if isinstance(doc, CommentedSeq): + to_return = CommentedMap() + for index, key in enumerate(doc): + to_return[key] = "" + to_return.lc.add_kv_line_col( + key, + [ + doc.lc.data[index][0], + doc.lc.data[index][1], + doc.lc.data[index][0], + doc.lc.data[index][1], + ], + ) + return to_return + return doc -def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: +def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. For each key/value pair in a CommentedMap, save the line/col info into a dictionary, @@ -387,7 +433,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> save_type: """Save a val of any type. 
@@ -396,22 +443,17 @@ def save( """ if keys is None: keys = [] - doc = doc_line_info - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -429,7 +471,8 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) ) return r @@ -451,6 +494,7 @@ def save( relative_uris=relative_uris, keys=new_keys, inserted_line_info=inserted_line_info, + shift=shift, ) return newdict diff --git a/schema_salad/tests/cwl_v1_0.py b/schema_salad/tests/cwl_v1_0.py index d1f94fccd..a04565ca0 100644 --- a/schema_salad/tests/cwl_v1_0.py +++ b/schema_salad/tests/cwl_v1_0.py @@ -24,6 +24,7 @@ Type, Union, cast, + no_type_check, ) from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit from urllib.request import pathname2url @@ -47,6 +48,7 @@ doc_line_info = CommentedMap() + class LoadingOptions: idx: IdxType fileuri: Optional[str] @@ -206,6 +208,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -245,28 +249,41 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, - inserted_line_info: Dict[int, int] = {} -) -> int: + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> 
Tuple[int, Optional[Dict[int, int]]]: """Add key value pair into Commented Map. Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. """ + if inserted_line_info is None: + inserted_line_info = {} + if len(inserted_line_info.keys()) >= 1: max_line = max(inserted_line_info.keys()) + 1 else: max_line = 0 - if ( - key in line_numbers - ): # If the key to insert is in the original CommentedMap as a key - line_info = old_doc.lc.data[key] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) - inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] - else: - line = line_info[0] - while line in inserted_line_info.keys(): + + if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] # Get the line information for the key + if ( + line_info[0] + shift not in inserted_line_info + ): # If the line of the key + shift isn't taken, add it + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[key][0] + shift, + old_doc.lc.data[key][1], + old_doc.lc.data[key][2] + shift, + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] + else: # If the line is already taken + line = line_info[0] + shift + while line in inserted_line_info.keys(): # Find the closest free line line += 1 new_doc.lc.add_kv_line_col( key, @@ -278,64 +295,93 @@ def add_kv( ], ) inserted_line_info[line] = old_doc.lc.data[key][1] - return max_len + return max_len, inserted_line_info elif isinstance(val, (int, float, str)) and not isinstance( val, bool ): # If the value is hashable if val in line_numbers: # If the value is in the original CommentedMap - line = line_numbers[val]["line"] - if line in inserted_line_info: + line = 
line_numbers[val]["line"] + shift # Get the line info for the value + if line in inserted_line_info: # Get the appropriate line to place value on line = max_line - if line in cols: - col = max(line_numbers[val]["col"], cols[line]) - else: - col = line_numbers[val]["col"] + + col = line_numbers[val]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len - elif isinstance(val, str): + return max_len, inserted_line_info + elif isinstance(val, str): # Logic for DSL expansition with "?" if val + "?" in line_numbers: - line = line_numbers[val + "?"]["line"] + line = line_numbers[val + "?"]["line"] + shift if line in inserted_line_info: line = max_line - if line in cols: - col = max(line_numbers[val + "?"]["col"], cols[line]) - else: - col = line_numbers[val + "?"]["col"] + col = line_numbers[val + "?"]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len + return max_len, inserted_line_info elif old_doc: if val in old_doc: index = old_doc.lc.data.index(val) line_info = old_doc.lc.data[index] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) - inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ - index - ][1] + if line_info[0] + shift not in inserted_line_info: + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[index][0] + shift, + old_doc.lc.data[index][1], + old_doc.lc.data[index][2] + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ + 1 + ] else: new_doc.lc.add_kv_line_col( key, [ - max_line, + max_line + shift, old_doc.lc.data[index][1], - max_line + (max_line - old_doc.lc.data[index][2]), + max_line + (max_line - old_doc.lc.data[index][2]) + shift, old_doc.lc.data[index][3], ], 
) - inserted_line_info[max_line] = old_doc.lc.data[index][1] - # If neither the key or value is in the original CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col( - key, [max_line, min_col, max_line, min_col + len(key) + 2] - ) + inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1 + return max_len + 1, inserted_line_info -def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: +@no_type_check +def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + return None + else: + return None + if isinstance(doc, CommentedSeq): + to_return = CommentedMap() + for index, key in enumerate(doc): + to_return[key] = "" + to_return.lc.add_kv_line_col( + key, + [ + doc.lc.data[index][0], + doc.lc.data[index][1], + doc.lc.data[index][0], + doc.lc.data[index][1], + ], + ) + return to_return + return doc + + +def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. For each key/value pair in a CommentedMap, save the line/col info into a dictionary, @@ -390,7 +436,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> save_type: """Save a val of any type. 
@@ -399,22 +446,17 @@ def save( """ if keys is None: keys = [] - doc = doc_line_info - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -432,7 +474,8 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) ) return r @@ -454,6 +497,7 @@ def save( relative_uris=relative_uris, keys=new_keys, inserted_line_info=inserted_line_info, + shift=shift, ) return newdict @@ -1089,9 +1133,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.name == other.name - and self.doc == other.doc - and self.type == other.type + self.name == other.name and self.doc == other.doc and self.type == other.type ) return False @@ -1175,16 +1217,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `name`, `doc`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1208,26 +1246,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: 
Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1248,21 +1278,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1276,7 +1309,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1291,6 +1325,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -1299,6 +1334,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = 
add_kv( old_doc=doc, @@ -1310,6 +1346,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1318,6 +1355,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1329,6 +1367,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1420,16 +1459,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `fields`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1451,26 +1486,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1491,21 +1518,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 
'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1519,7 +1549,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1528,6 +1559,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1539,6 +1571,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1547,6 +1580,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1558,6 +1592,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1651,16 +1686,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `symbols`, `type`".format(k), 
SourceLine(_doc, k, str), ) ) @@ -1682,26 +1713,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1722,21 +1745,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1750,7 +1776,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1765,6 +1792,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = 
save( @@ -1773,6 +1801,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1784,6 +1813,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1872,16 +1902,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `items`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1903,26 +1929,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1943,21 +1961,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, 
relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1971,7 +1992,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -1980,6 +2002,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1991,6 +2014,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1999,6 +2023,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2010,6 +2035,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2387,9 +2413,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2427,26 +2451,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = 
copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -2469,21 +2485,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2497,7 +2516,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2512,6 +2532,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) @@ -2526,6 +2547,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.basename is not None and "basename" not in r: r["basename"] = save( @@ -2534,6 +2556,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, 
inserted_line_info = add_kv( old_doc=doc, @@ -2545,6 +2568,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dirname is not None and "dirname" not in r: r["dirname"] = save( @@ -2553,6 +2577,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2564,6 +2589,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.nameroot is not None and "nameroot" not in r: r["nameroot"] = save( @@ -2572,6 +2598,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2583,6 +2610,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.nameext is not None and "nameext" not in r: r["nameext"] = save( @@ -2591,6 +2619,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2602,6 +2631,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.checksum is not None and "checksum" not in r: r["checksum"] = save( @@ -2610,6 +2640,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2621,6 +2652,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.size is not None and "size" not in r: r["size"] = save( @@ -2629,6 +2661,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2640,6 +2673,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -2648,6 +2682,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2659,6 +2694,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, base_url, True, None, relative_uris) @@ -2673,6 +2709,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.contents is not None and "contents" not in r: r["contents"] = save( @@ -2681,6 +2718,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2692,6 +2730,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2804,9 +2843,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) + return hash((self.class_, self.location, self.path, self.basename, self.listing)) @classmethod def fromDoc( @@ -2901,9 +2938,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2934,26 +2969,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if 
keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -2976,21 +3003,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3004,7 +3034,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3019,6 +3050,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) @@ -3033,6 +3065,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.basename is not None and "basename" not in r: r["basename"] = save( @@ -3041,6 +3074,7 @@ def 
save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3052,6 +3086,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -3060,6 +3095,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3071,6 +3107,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3266,9 +3303,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3301,26 +3336,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -3341,21 +3368,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] 
+ shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3369,7 +3399,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3384,6 +3415,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -3392,6 +3424,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3403,6 +3436,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3411,6 +3445,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3422,6 +3457,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -3430,6 +3466,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3441,6 +3478,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3449,6 +3487,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3460,6 +3499,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3605,9 +3645,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3639,26 +3677,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -3679,21 +3709,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3707,7 +3740,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3722,6 +3756,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -3730,6 +3765,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3741,6 +3777,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3749,6 +3786,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3760,6 +3798,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3768,6 +3807,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3779,6 +3819,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3942,9 +3983,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = 
expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3977,26 +4016,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4017,21 +4048,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4045,7 +4079,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4060,11 
+4095,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4076,6 +4110,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4084,6 +4119,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4095,6 +4131,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4103,6 +4140,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4114,6 +4152,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -4122,6 +4161,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4133,6 +4173,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4266,9 +4307,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = 
_doc[k] else: _errors__.append( @@ -4299,26 +4338,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4339,21 +4370,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4367,7 +4401,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -4376,6 +4411,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4387,6 +4423,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4395,6 +4432,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4406,6 +4444,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4414,6 +4453,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4425,6 +4465,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -4433,6 +4474,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4444,6 +4486,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4589,9 +4632,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4623,26 +4664,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if 
isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4663,21 +4696,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4691,7 +4727,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4706,6 +4743,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4714,6 +4752,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4725,6 +4764,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4733,6 +4773,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4744,6 +4785,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -4752,6 +4794,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4763,6 +4806,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4878,9 +4922,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4910,26 +4952,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4950,21 +4984,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in 
inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4978,7 +5015,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -4987,6 +5025,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4998,6 +5037,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5006,6 +5046,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5017,6 +5058,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5025,6 +5067,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5036,6 +5079,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5169,9 +5213,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = 
expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5202,26 +5244,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -5242,21 +5276,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5270,7 +5307,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -5285,6 +5323,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5293,6 +5332,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5304,6 +5344,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5312,6 +5353,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5323,6 +5365,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -5331,6 +5374,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5342,6 +5386,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5475,9 +5520,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5508,26 +5551,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, 
CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -5548,21 +5583,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5576,7 +5614,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -5585,6 +5624,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5596,6 +5636,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5604,6 +5645,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5615,6 +5657,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" 
not in r: r["label"] = save( @@ -5623,6 +5666,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5634,6 +5678,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -5642,6 +5687,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5653,6 +5699,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5918,9 +5965,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5957,41 +6002,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) 
temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -6016,21 +6049,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6044,7 +6080,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6059,6 +6096,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -6067,6 +6105,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6078,6 +6117,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -6086,6 +6126,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, 
inserted_line_info = add_kv( old_doc=doc, @@ -6097,6 +6138,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -6105,6 +6147,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6116,6 +6159,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -6124,6 +6168,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6135,6 +6180,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -6149,6 +6195,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -6157,6 +6204,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6168,6 +6216,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -6176,6 +6225,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6187,6 +6237,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and 
"type" not in r: r["type"] = save( @@ -6195,6 +6246,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6206,6 +6258,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -6439,9 +6492,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6476,41 +6527,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -6535,21 +6574,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not 
None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6563,7 +6605,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6578,6 +6621,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -6586,6 +6630,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6597,6 +6642,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -6605,6 +6651,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6616,6 +6663,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -6624,6 +6672,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) 
max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6635,6 +6684,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -6643,6 +6693,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6654,6 +6705,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -6662,6 +6714,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6673,6 +6726,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -6687,6 +6741,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -6764,10 +6819,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InlineJavascriptRequirement): - return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) + return bool(self.class_ == other.class_ and self.expressionLib == other.expressionLib) return False def __hash__(self) -> int: @@ -6812,9 +6864,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6828,9 +6878,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 
'InlineJavascriptRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InlineJavascriptRequirement'", None, _errors__) _constructed = cls( expressionLib=expressionLib, extension_fields=extension_fields, @@ -6844,26 +6892,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -6886,21 +6926,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6914,7 +6957,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6923,6 +6967,7 @@ def 
save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6934,6 +6979,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7021,16 +7067,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `types`".format(k), SourceLine(_doc, k, str), ) ) @@ -7051,26 +7093,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7093,21 +7127,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], 
- inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7121,7 +7158,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.types is not None and "types" not in r: r["types"] = save( @@ -7130,6 +7168,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7141,6 +7180,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7183,9 +7223,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, EnvironmentDef): - return bool( - self.envName == other.envName and self.envValue == other.envValue - ) + return bool(self.envName == other.envName and self.envValue == other.envValue) return False def __hash__(self) -> int: @@ -7238,16 +7276,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), + "invalid field `{}`, expected one of: `envName`, `envValue`".format(k), SourceLine(_doc, k, str), ) ) @@ -7269,26 +7303,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys 
= [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7309,21 +7335,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7337,7 +7366,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7346,6 +7376,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7357,6 +7388,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.envValue is not None and "envValue" not in r: r["envValue"] = save( @@ -7365,6 +7397,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, 
inserted_line_info = add_kv( old_doc=doc, @@ -7376,6 +7409,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7627,9 +7661,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -7663,26 +7695,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7703,21 +7727,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ 
-7731,7 +7758,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -7740,6 +7768,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7751,6 +7780,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.position is not None and "position" not in r: r["position"] = save( @@ -7759,6 +7789,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7770,6 +7801,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.prefix is not None and "prefix" not in r: r["prefix"] = save( @@ -7778,6 +7810,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7789,6 +7822,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.separate is not None and "separate" not in r: r["separate"] = save( @@ -7797,6 +7831,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7808,6 +7843,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.itemSeparator is not None and "itemSeparator" not in r: r["itemSeparator"] = save( @@ -7816,6 +7852,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ 
-7827,6 +7864,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -7835,6 +7873,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7846,6 +7885,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.shellQuote is not None and "shellQuote" not in r: r["shellQuote"] = save( @@ -7854,6 +7894,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7865,6 +7906,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8007,9 +8049,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8039,26 +8079,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: 
r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -8079,21 +8111,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8107,7 +8142,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.glob is not None and "glob" not in r: r["glob"] = save( @@ -8116,6 +8152,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8127,6 +8164,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8135,6 +8173,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8146,6 +8185,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputEval is not None and "outputEval" not in r: r["outputEval"] = save( @@ -8154,6 +8194,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8165,6 +8206,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8331,9 +8373,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8347,9 +8387,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordField'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputRecordField'", None, _errors__) _constructed = cls( name=name, doc=doc, @@ -8368,26 +8406,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -8408,21 +8438,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, 
just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8436,7 +8469,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -8451,6 +8485,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -8459,6 +8494,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8470,6 +8506,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -8478,6 +8515,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8489,6 +8527,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8497,6 +8536,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8508,6 +8548,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -8516,6 +8557,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ 
-8527,6 +8569,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8672,9 +8715,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8688,9 +8729,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputRecordSchema'", None, _errors__) _constructed = cls( fields=fields, type=type, @@ -8708,26 +8747,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -8748,21 +8779,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + 
shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8776,7 +8810,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -8791,6 +8826,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -8799,6 +8835,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8810,6 +8847,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -8818,6 +8856,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8829,6 +8868,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -8837,6 +8877,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8848,6 +8889,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -9011,9 +9053,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex 
= expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9027,9 +9067,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputEnumSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputEnumSchema'", None, _errors__) _constructed = cls( symbols=symbols, type=type, @@ -9048,26 +9086,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -9088,21 +9118,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9116,7 +9149,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9131,11 +9165,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9147,6 +9180,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9155,6 +9189,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9166,6 +9201,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9174,6 +9210,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9185,6 +9222,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9193,6 +9231,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9204,6 +9243,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -9337,9 +9377,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: 
- ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9353,9 +9391,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputArraySchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputArraySchema'", None, _errors__) _constructed = cls( items=items, type=type, @@ -9372,26 +9408,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -9412,21 +9440,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9440,7 +9471,8 @@ def save( 
cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -9449,6 +9481,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9460,6 +9493,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9468,6 +9502,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9479,6 +9514,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9487,6 +9523,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9498,6 +9535,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9506,6 +9544,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9517,6 +9556,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -9662,9 +9702,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: 
_errors__.append( @@ -9678,9 +9716,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordField'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputRecordField'", None, _errors__) _constructed = cls( name=name, doc=doc, @@ -9698,26 +9734,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -9738,21 +9766,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9766,7 +9797,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = 
save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9781,6 +9813,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9789,6 +9822,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9800,6 +9834,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9808,6 +9843,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9819,6 +9855,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -9827,6 +9864,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9838,6 +9876,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -9983,9 +10022,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9999,9 +10036,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputRecordSchema'", None, _errors__) _constructed = cls( fields=fields, type=type, @@ -10019,26 +10054,18 @@ def 
save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -10059,21 +10086,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10087,7 +10117,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10102,6 +10133,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -10110,6 +10142,7 @@ def save( 
base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10121,6 +10154,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10129,6 +10163,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10140,6 +10175,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10148,6 +10184,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10159,6 +10196,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -10292,9 +10330,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10308,9 +10344,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputEnumSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputEnumSchema'", None, _errors__) _constructed = cls( symbols=symbols, type=type, @@ -10327,26 +10361,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = 
copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -10367,21 +10393,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10395,7 +10424,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -10410,6 +10440,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10418,6 +10449,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10429,6 +10461,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label 
is not None and "label" not in r: r["label"] = save( @@ -10437,6 +10470,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10448,6 +10482,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -10456,6 +10491,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10467,6 +10503,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -10600,9 +10637,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10616,9 +10651,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputArraySchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputArraySchema'", None, _errors__) _constructed = cls( items=items, type=type, @@ -10635,26 +10668,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None 
- else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -10675,21 +10700,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10703,7 +10731,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -10712,6 +10741,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10723,6 +10753,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10731,6 +10762,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10742,6 +10774,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10750,6 +10783,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + 
shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10761,6 +10795,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -10769,6 +10804,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10780,6 +10816,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -11049,9 +11086,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11088,41 +11123,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", 
[doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -11147,21 +11170,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11175,7 +11201,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -11190,6 +11217,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11198,6 +11226,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11209,6 +11238,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -11217,6 +11247,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11228,6 +11259,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -11236,6 +11268,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11247,6 +11280,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11255,6 +11289,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11266,6 +11301,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -11280,6 +11316,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -11288,6 +11325,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11299,6 +11337,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -11307,6 +11346,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11318,6 +11358,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11326,6 +11367,7 @@ def save( base_url=str(self.id), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11337,6 +11379,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -11596,9 +11639,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11612,9 +11653,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -11636,41 +11675,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - 
doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -11695,21 +11722,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11723,7 +11753,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -11738,6 +11769,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11746,6 +11778,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11757,6 +11790,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -11765,6 +11799,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11776,6 +11811,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if 
self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -11784,6 +11820,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11795,6 +11832,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11803,6 +11841,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11814,6 +11853,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -11822,6 +11862,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11833,6 +11874,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -11847,6 +11889,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11855,6 +11898,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11866,6 +11910,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -12302,9 +12347,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, 
scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12348,41 +12391,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -12409,21 +12440,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if 
len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -12437,7 +12471,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12452,6 +12487,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -12460,6 +12496,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12471,6 +12508,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -12479,6 +12517,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12490,6 +12529,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -12498,6 +12538,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12509,6 +12550,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -12517,6 +12559,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12528,6 +12571,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -12536,6 +12580,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12547,6 +12592,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -12555,6 +12601,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12566,11 +12613,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12582,6 +12628,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.baseCommand is not None and "baseCommand" not in r: r["baseCommand"] = save( @@ -12590,6 +12637,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12601,6 +12649,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.arguments is not None and "arguments" not in r: r["arguments"] = save( @@ -12609,6 +12658,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12620,6 +12670,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.stdin is not None and "stdin" not in r: r["stdin"] = save( @@ -12628,6 +12679,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12639,6 +12691,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.stderr is not None and "stderr" not in r: r["stderr"] = save( @@ -12647,6 +12700,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12658,6 +12712,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.stdout is not None and "stdout" not in r: r["stdout"] = save( @@ -12666,6 +12721,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12677,6 +12733,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.successCodes is not None and "successCodes" not in r: r["successCodes"] = save( @@ -12685,6 +12742,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12696,6 +12754,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: r["temporaryFailCodes"] = save( @@ -12704,6 +12763,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12715,6 +12775,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.permanentFailCodes is not None and "permanentFailCodes" not in r: r["permanentFailCodes"] = save( @@ -12723,6 +12784,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12734,6 +12796,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -12988,9 +13051,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13023,26 +13084,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -13065,21 +13118,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + 
[key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13093,7 +13149,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -13102,6 +13159,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13113,6 +13171,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerLoad is not None and "dockerLoad" not in r: r["dockerLoad"] = save( @@ -13121,6 +13180,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13132,6 +13192,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerFile is not None and "dockerFile" not in r: r["dockerFile"] = save( @@ -13140,6 +13201,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13151,6 +13213,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerImport is not None and "dockerImport" not in r: r["dockerImport"] = save( @@ -13159,6 +13222,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13170,6 +13234,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerImageId is not None and "dockerImageId" not in r: r["dockerImageId"] = save( @@ -13178,6 +13243,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13189,6 +13255,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: r["dockerOutputDirectory"] = save( @@ -13197,6 +13264,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13208,6 +13276,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -13300,16 +13369,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `packages`".format(k), SourceLine(_doc, k, str), ) ) @@ -13330,26 +13395,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) 
and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -13372,21 +13429,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13400,7 +13460,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -13409,6 +13470,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13420,6 +13482,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -13535,9 +13598,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13567,26 +13628,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - 
inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -13607,21 +13660,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13635,7 +13691,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.package is not None and "package" not in r: r["package"] = save( @@ -13644,6 +13701,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13655,6 +13713,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.version is not None and "version" not in r: r["version"] = 
save( @@ -13663,6 +13722,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13674,6 +13734,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.specs is not None and "specs" not in r: u = save_relative_uri(self.specs, base_url, False, None, relative_uris) @@ -13688,6 +13749,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -13811,9 +13873,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13843,26 +13903,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -13883,21 +13935,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( 
getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13911,7 +13966,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -13920,6 +13976,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13931,6 +13988,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.entry is not None and "entry" not in r: r["entry"] = save( @@ -13939,6 +13997,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13950,6 +14009,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.writable is not None and "writable" not in r: r["writable"] = save( @@ -13958,6 +14018,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13969,6 +14030,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14049,25 +14111,19 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, 
scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `listing`".format(k), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException( - "Trying 'InitialWorkDirRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InitialWorkDirRequirement'", None, _errors__) _constructed = cls( listing=listing, extension_fields=extension_fields, @@ -14081,26 +14137,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14123,21 +14171,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: 
saved_val = saved_val[0] r[key] = saved_val @@ -14151,7 +14202,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -14160,6 +14212,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14171,6 +14224,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14253,16 +14307,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `envDef`".format(k), SourceLine(_doc, k, str), ) ) @@ -14283,26 +14333,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14325,21 +14367,24 @@ def save( if 
isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14353,7 +14398,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -14362,6 +14408,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14373,6 +14420,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14443,9 +14491,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14457,9 +14503,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ShellCommandRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'ShellCommandRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -14472,26 +14516,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: 
Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14514,21 +14550,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14542,7 +14581,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14808,9 +14848,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14845,26 +14883,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - 
inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14887,21 +14917,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14915,7 +14948,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -14924,6 +14958,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14935,6 +14970,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.coresMax is not None and "coresMax" not in r: 
r["coresMax"] = save( @@ -14943,6 +14979,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14954,6 +14991,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.ramMin is not None and "ramMin" not in r: r["ramMin"] = save( @@ -14962,6 +15000,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14973,6 +15012,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.ramMax is not None and "ramMax" not in r: r["ramMax"] = save( @@ -14981,6 +15021,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14992,6 +15033,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.tmpdirMin is not None and "tmpdirMin" not in r: r["tmpdirMin"] = save( @@ -15000,6 +15042,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15011,6 +15054,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.tmpdirMax is not None and "tmpdirMax" not in r: r["tmpdirMax"] = save( @@ -15019,6 +15063,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15030,6 +15075,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outdirMin is not None and "outdirMin" not in r: r["outdirMin"] = save( @@ -15038,6 +15084,7 @@ def save( 
base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15049,6 +15096,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outdirMax is not None and "outdirMax" not in r: r["outdirMax"] = save( @@ -15057,6 +15105,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15068,6 +15117,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15323,9 +15373,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15339,9 +15387,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ExpressionToolOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'ExpressionToolOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -15363,41 +15409,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + 
+ if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -15422,21 +15456,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15450,7 +15487,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -15465,6 +15503,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -15473,6 +15512,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15484,6 +15524,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) 
if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -15492,6 +15533,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15503,6 +15545,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -15511,6 +15554,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15522,6 +15566,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -15530,6 +15575,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15541,6 +15587,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -15549,6 +15596,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15560,6 +15608,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -15574,6 +15623,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -15582,6 +15632,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15593,6 +15644,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15872,9 +15924,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15911,41 +15961,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -15972,21 +16010,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in 
inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16000,7 +16041,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16015,6 +16057,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -16023,6 +16066,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16034,6 +16078,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -16042,6 +16087,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16053,6 +16099,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -16061,6 +16108,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16072,6 +16120,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -16080,6 +16129,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16091,6 +16141,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -16099,6 +16150,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16110,6 +16162,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -16118,6 +16171,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16129,11 +16183,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16145,6 +16198,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.expression is not None and "expression" not in r: r["expression"] = save( @@ -16153,6 +16207,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16164,6 +16219,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -16471,9 +16527,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16487,9 +16541,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'WorkflowOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -16513,41 +16565,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -16572,21 +16612,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not 
None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16600,7 +16643,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16615,6 +16659,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -16623,6 +16668,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16634,6 +16680,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -16642,6 +16689,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16653,6 +16701,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -16661,6 +16710,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + 
shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16672,6 +16722,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -16680,6 +16731,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16691,6 +16743,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -16699,6 +16752,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16710,6 +16764,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -16724,11 +16779,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri( - self.outputSource, str(self.id), False, 1, relative_uris - ) + u = save_relative_uri(self.outputSource, str(self.id), False, 1, relative_uris) r["outputSource"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16740,6 +16794,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -16748,6 +16803,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16759,6 +16815,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -16767,6 +16824,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16778,6 +16836,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -16889,9 +16948,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.source, self.linkMerge, self.id, self.default, self.valueFrom) - ) + return hash((self.source, self.linkMerge, self.id, self.default, self.valueFrom)) @classmethod def fromDoc( @@ -17009,9 +17066,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17044,41 +17099,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: 
keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -17103,21 +17146,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17131,7 +17177,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17146,6 +17193,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.source is not None and "source" not in r: u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) @@ -17160,6 +17208,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -17168,6 +17217,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17179,6 +17229,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -17187,6 +17238,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17198,6 +17250,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -17206,6 +17259,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17217,6 +17271,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -17308,9 +17363,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17337,41 +17390,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 
1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -17396,21 +17437,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17424,7 +17468,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17439,6 +17484,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -17776,9 +17822,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17816,41 +17860,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + 
inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -17875,21 +17907,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17903,7 +17938,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, 
relative_uris) @@ -17918,6 +17954,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.in_ is not None and "in" not in r: r["in"] = save( @@ -17926,6 +17963,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17937,6 +17975,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.out is not None and "out" not in r: u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) @@ -17951,6 +17990,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -17959,6 +17999,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17970,6 +18011,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -17978,6 +18020,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17989,6 +18032,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -17997,6 +18041,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18008,6 +18053,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ 
-18016,6 +18062,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18027,6 +18074,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.run is not None and "run" not in r: u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) @@ -18041,6 +18089,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.scatter is not None and "scatter" not in r: u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) @@ -18055,11 +18104,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri( - self.scatterMethod, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.scatterMethod, str(self.id), False, None, relative_uris) r["scatterMethod"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18071,6 +18119,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -18396,9 +18445,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18435,41 +18482,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = 
copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -18496,21 +18531,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18524,7 +18562,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18539,6 +18578,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ 
-18547,6 +18587,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18558,6 +18599,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -18566,6 +18608,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18577,6 +18620,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -18585,6 +18629,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18596,6 +18641,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -18604,6 +18650,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18615,6 +18662,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18623,6 +18671,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18634,6 +18683,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -18642,6 +18692,7 @@ def save( base_url=str(self.id), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18653,11 +18704,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18669,6 +18719,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.steps is not None and "steps" not in r: r["steps"] = save( @@ -18677,6 +18728,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18688,6 +18740,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -18766,9 +18819,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18780,9 +18831,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'SubworkflowFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'SubworkflowFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -18795,26 +18844,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys 
is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -18837,21 +18878,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18865,7 +18909,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -18931,9 +18976,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18945,9 +18988,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ScatterFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'ScatterFeatureRequirement'", None, _errors__) _constructed = cls( 
extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -18960,26 +19001,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -19002,21 +19035,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19030,7 +19066,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -19096,9 +19133,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, 
scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19110,9 +19145,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'MultipleInputFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'MultipleInputFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19125,26 +19158,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -19167,21 +19192,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19195,7 +19223,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -19261,9 +19290,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19275,9 +19302,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'StepInputExpressionRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'StepInputExpressionRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19290,26 +19315,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -19332,21 +19349,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + 
inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19360,7 +19380,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -19889,18 +19910,14 @@ def save( union_of_None_type_or_strtype, True, False, None ) Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader( - Directory_classLoader, False, True, None -) +uri_Directory_classLoader_False_True_None = _URILoader(Directory_classLoader, False, True, None) union_of_strtype_or_ExpressionLoader = _UnionLoader( ( strtype, ExpressionLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader( - union_of_strtype_or_ExpressionLoader -) +array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader(union_of_strtype_or_ExpressionLoader) union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader = _UnionLoader( ( None_type, @@ -20014,11 +20031,13 @@ def save( ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = ( + _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, + ) ) union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( ( @@ -20149,17 +20168,17 @@ def save( uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( SchemaDefRequirement_classLoader, False, True, None ) 
-union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( - _UnionLoader( - ( - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - ) +union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _UnionLoader( + ( + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, ) ) -array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _ArrayLoader( - union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader +array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( + _ArrayLoader( + union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader + ) ) union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( ( @@ -20202,10 +20221,8 @@ def save( array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" - ) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" ) union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -20240,10 +20257,8 @@ def save( array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" - ) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" ) 
union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -20296,13 +20311,15 @@ def save( CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader( + union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) ) ) array_of_inttype = _ArrayLoader(inttype) @@ -20312,15 +20329,11 @@ def save( array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader( - ("DockerRequirement",), "DockerRequirement_class" -) +DockerRequirement_classLoader = _EnumLoader(("DockerRequirement",), "DockerRequirement_class") uri_DockerRequirement_classLoader_False_True_None = _URILoader( DockerRequirement_classLoader, False, True, None ) -SoftwareRequirement_classLoader = _EnumLoader( - ("SoftwareRequirement",), "SoftwareRequirement_class" -) +SoftwareRequirement_classLoader = _EnumLoader(("SoftwareRequirement",), "SoftwareRequirement_class") uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( SoftwareRequirement_classLoader, False, True, None ) @@ -20337,17 +20350,21 @@ def save( uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( 
InitialWorkDirRequirement_classLoader, False, True, None ) -union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _UnionLoader( - ( - FileLoader, - DirectoryLoader, - DirentLoader, - strtype, - ExpressionLoader, +union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = ( + _UnionLoader( + ( + FileLoader, + DirectoryLoader, + DirentLoader, + strtype, + ExpressionLoader, + ) ) ) -array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _ArrayLoader( - union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader +array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = ( + _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader + ) ) union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader = _UnionLoader( ( @@ -20356,9 +20373,7 @@ def save( ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader( - ("EnvVarRequirement",), "EnvVarRequirement_class" -) +EnvVarRequirement_classLoader = _EnumLoader(("EnvVarRequirement",), "EnvVarRequirement_class") uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( EnvVarRequirement_classLoader, False, True, None ) @@ -20372,9 +20387,7 @@ def save( uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( ShellCommandRequirement_classLoader, False, True, None ) -ResourceRequirement_classLoader = _EnumLoader( - ("ResourceRequirement",), "ResourceRequirement_class" -) +ResourceRequirement_classLoader = _EnumLoader(("ResourceRequirement",), "ResourceRequirement_class") uri_ResourceRequirement_classLoader_False_True_None = _URILoader( ResourceRequirement_classLoader, False, True, None ) @@ -20405,9 +20418,7 @@ def save( uri_ExpressionTool_classLoader_False_True_None = _URILoader( ExpressionTool_classLoader, False, 
True, None ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( - ExpressionToolOutputParameterLoader -) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) @@ -20439,13 +20450,11 @@ def save( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( - _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, - ) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, ) array_of_Any_type = _ArrayLoader(Any_type) union_of_None_type_or_array_of_Any_type = _UnionLoader( @@ -20457,14 +20466,12 @@ def save( idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, ) ) uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( @@ -20486,9 +20493,7 @@ def save( union_of_None_type_or_ScatterMethodLoader, False, True, None ) Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader( - Workflow_classLoader, False, True, None -) +uri_Workflow_classLoader_False_True_None = 
_URILoader(Workflow_classLoader, False, True, None) array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" @@ -20529,10 +20534,8 @@ def save( WorkflowLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader - ) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader ) union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_1.py b/schema_salad/tests/cwl_v1_1.py index 2d851808e..1488c286a 100644 --- a/schema_salad/tests/cwl_v1_1.py +++ b/schema_salad/tests/cwl_v1_1.py @@ -24,6 +24,7 @@ Type, Union, cast, + no_type_check, ) from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit from urllib.request import pathname2url @@ -47,6 +48,7 @@ doc_line_info = CommentedMap() + class LoadingOptions: idx: IdxType fileuri: Optional[str] @@ -206,6 +208,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -245,28 +249,41 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, - inserted_line_info: Dict[int, int] = {} -) -> int: + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> Tuple[int, Optional[Dict[int, int]]]: """Add key value pair into Commented Map. 
Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. """ + if inserted_line_info is None: + inserted_line_info = {} + if len(inserted_line_info.keys()) >= 1: max_line = max(inserted_line_info.keys()) + 1 else: max_line = 0 - if ( - key in line_numbers - ): # If the key to insert is in the original CommentedMap as a key - line_info = old_doc.lc.data[key] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) - inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] - else: - line = line_info[0] - while line in inserted_line_info.keys(): + + if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] # Get the line information for the key + if ( + line_info[0] + shift not in inserted_line_info + ): # If the line of the key + shift isn't taken, add it + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[key][0] + shift, + old_doc.lc.data[key][1], + old_doc.lc.data[key][2] + shift, + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] + else: # If the line is already taken + line = line_info[0] + shift + while line in inserted_line_info.keys(): # Find the closest free line line += 1 new_doc.lc.add_kv_line_col( key, @@ -278,64 +295,93 @@ def add_kv( ], ) inserted_line_info[line] = old_doc.lc.data[key][1] - return max_len + return max_len, inserted_line_info elif isinstance(val, (int, float, str)) and not isinstance( val, bool ): # If the value is hashable if val in line_numbers: # If the value is in the original CommentedMap - line = line_numbers[val]["line"] - if line in inserted_line_info: + line = line_numbers[val]["line"] + shift # Get the line info for the value + if line in inserted_line_info: 
# Get the appropriate line to place value on line = max_line - if line in cols: - col = max(line_numbers[val]["col"], cols[line]) - else: - col = line_numbers[val]["col"] + + col = line_numbers[val]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len - elif isinstance(val, str): + return max_len, inserted_line_info + elif isinstance(val, str): # Logic for DSL expansition with "?" if val + "?" in line_numbers: - line = line_numbers[val + "?"]["line"] + line = line_numbers[val + "?"]["line"] + shift if line in inserted_line_info: line = max_line - if line in cols: - col = max(line_numbers[val + "?"]["col"], cols[line]) - else: - col = line_numbers[val + "?"]["col"] + col = line_numbers[val + "?"]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 - return max_len + return max_len, inserted_line_info elif old_doc: if val in old_doc: index = old_doc.lc.data.index(val) line_info = old_doc.lc.data[index] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) - inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ - index - ][1] + if line_info[0] + shift not in inserted_line_info: + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[index][0] + shift, + old_doc.lc.data[index][1], + old_doc.lc.data[index][2] + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ + 1 + ] else: new_doc.lc.add_kv_line_col( key, [ - max_line, + max_line + shift, old_doc.lc.data[index][1], - max_line + (max_line - old_doc.lc.data[index][2]), + max_line + (max_line - old_doc.lc.data[index][2]) + shift, old_doc.lc.data[index][3], ], ) - inserted_line_info[max_line] = old_doc.lc.data[index][1] - # If neither the key or value is in 
the original CommentedMap (or value is not hashable) - new_doc.lc.add_kv_line_col( - key, [max_line, min_col, max_line, min_col + len(key) + 2] - ) + inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1 + return max_len + 1, inserted_line_info + + +@no_type_check +def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + return None + else: + return None + if isinstance(doc, CommentedSeq): + to_return = CommentedMap() + for index, key in enumerate(doc): + to_return[key] = "" + to_return.lc.add_kv_line_col( + key, + [ + doc.lc.data[index][0], + doc.lc.data[index][1], + doc.lc.data[index][0], + doc.lc.data[index][1], + ], + ) + return to_return + return doc -def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: +def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. For each key/value pair in a CommentedMap, save the line/col info into a dictionary, @@ -390,7 +436,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> save_type: """Save a val of any type. 
@@ -399,22 +446,17 @@ def save( """ if keys is None: keys = [] - doc = doc_line_info - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -432,7 +474,8 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) ) return r @@ -454,6 +497,7 @@ def save( relative_uris=relative_uris, keys=new_keys, inserted_line_info=inserted_line_info, + shift=shift, ) return newdict @@ -1093,9 +1137,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type + self.doc == other.doc and self.name == other.name and self.type == other.type ) return False @@ -1179,16 +1221,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1212,26 +1250,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: 
Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1252,21 +1282,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1280,7 +1313,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1295,6 +1329,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -1303,6 +1338,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = 
add_kv( old_doc=doc, @@ -1314,6 +1350,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1322,6 +1359,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1333,6 +1371,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1424,16 +1463,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `fields`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1455,26 +1490,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1495,21 +1522,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 
'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1523,7 +1553,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1532,6 +1563,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1543,6 +1575,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1551,6 +1584,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1562,6 +1596,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1655,16 +1690,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `symbols`, `type`".format(k), 
SourceLine(_doc, k, str), ) ) @@ -1686,26 +1717,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1726,21 +1749,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1754,7 +1780,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1769,6 +1796,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = 
save( @@ -1777,6 +1805,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1788,6 +1817,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1876,16 +1906,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `items`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1907,26 +1933,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1947,21 +1965,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, 
relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1975,7 +1996,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -1984,6 +2006,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1995,6 +2018,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -2003,6 +2027,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2014,6 +2039,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2391,9 +2417,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2431,26 +2455,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = 
copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -2473,21 +2489,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2501,7 +2520,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2516,6 +2536,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) @@ -2530,6 +2551,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.basename is not None and "basename" not in r: r["basename"] = save( @@ -2538,6 +2560,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, 
inserted_line_info = add_kv( old_doc=doc, @@ -2549,6 +2572,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dirname is not None and "dirname" not in r: r["dirname"] = save( @@ -2557,6 +2581,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2568,6 +2593,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.nameroot is not None and "nameroot" not in r: r["nameroot"] = save( @@ -2576,6 +2602,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2587,6 +2614,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.nameext is not None and "nameext" not in r: r["nameext"] = save( @@ -2595,6 +2623,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2606,6 +2635,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.checksum is not None and "checksum" not in r: r["checksum"] = save( @@ -2614,6 +2644,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2625,6 +2656,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.size is not None and "size" not in r: r["size"] = save( @@ -2633,6 +2665,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2644,6 +2677,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -2652,6 +2686,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2663,6 +2698,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, base_url, True, None, relative_uris) @@ -2677,6 +2713,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.contents is not None and "contents" not in r: r["contents"] = save( @@ -2685,6 +2722,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2696,6 +2734,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2808,9 +2847,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) + return hash((self.class_, self.location, self.path, self.basename, self.listing)) @classmethod def fromDoc( @@ -2905,9 +2942,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2938,26 +2973,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if 
keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -2980,21 +3007,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3008,7 +3038,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3023,6 +3054,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) @@ -3037,6 +3069,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.basename is not None and "basename" not in r: r["basename"] = save( @@ -3045,6 +3078,7 @@ def 
save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3056,6 +3090,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -3064,6 +3099,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3075,6 +3111,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3182,16 +3219,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`".format( - k - ), + "invalid field `{}`, expected one of: `loadContents`".format(k), SourceLine(_doc, k, str), ) ) @@ -3212,26 +3245,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, 
doc.lc.col) @@ -3252,21 +3277,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3280,7 +3308,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3289,6 +3318,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3300,6 +3330,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3574,9 +3605,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3613,26 +3642,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = 
copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -3653,21 +3674,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3681,7 +3705,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3696,6 +3721,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -3704,6 +3730,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3715,6 +3742,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3723,6 +3751,7 @@ def save( 
base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3734,6 +3763,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3742,6 +3772,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3753,6 +3784,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -3761,6 +3793,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3772,6 +3805,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -3780,6 +3814,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3791,11 +3826,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3807,6 +3841,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3815,6 +3850,7 @@ def save( 
base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3826,6 +3862,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -3834,6 +3871,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3845,6 +3883,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4023,9 +4062,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4058,26 +4095,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4098,21 +4127,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = 
doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4126,7 +4158,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4141,6 +4174,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -4149,6 +4183,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4160,6 +4195,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4168,6 +4204,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4179,6 +4216,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4187,6 +4225,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4198,6 +4237,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4206,6 +4246,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4217,6 +4258,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4380,9 +4422,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4415,26 +4455,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4455,21 +4487,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4483,7 +4518,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4498,11 +4534,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4514,6 +4549,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4522,6 +4558,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4533,6 +4570,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4541,6 +4579,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4552,6 +4591,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4560,6 +4600,7 @@ def save( 
base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4571,6 +4612,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4734,9 +4776,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4769,26 +4809,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4809,21 +4841,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == 
list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4837,7 +4872,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4852,6 +4888,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -4860,6 +4897,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4871,6 +4909,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4879,6 +4918,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4890,6 +4930,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4898,6 +4939,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4909,6 +4951,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4917,6 +4960,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4928,6 +4972,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5146,9 +5191,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5183,26 +5226,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -5223,21 +5258,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5251,7 +5289,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info 
+ inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5266,6 +5305,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5274,6 +5314,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5285,6 +5326,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5293,6 +5335,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5304,6 +5347,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5312,6 +5356,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5323,6 +5368,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -5331,6 +5377,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5342,6 +5389,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -5350,6 +5398,7 @@ def save( base_url=str(self.name), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5361,11 +5410,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5377,6 +5425,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5387,9 +5436,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] - ) + attrs = frozenset(["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"]) class OutputRecordSchema(RecordSchema, OutputSchema): @@ -5545,9 +5592,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5580,26 +5625,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = 
iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -5620,21 +5657,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5648,7 +5688,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5663,6 +5704,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -5671,6 +5713,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5682,6 +5725,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5690,6 +5734,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5701,6 +5746,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + 
shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5709,6 +5755,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5720,6 +5767,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5728,6 +5776,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5739,6 +5788,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5902,9 +5952,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5937,26 +5985,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -5977,21 +6017,24 @@ def save( if isinstance(key, str): if hasattr(self, 
key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6005,7 +6048,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6020,11 +6064,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6036,6 +6079,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -6044,6 +6088,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6055,6 +6100,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -6063,6 +6109,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, 
+ shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6074,6 +6121,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -6082,6 +6130,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6093,6 +6142,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -6256,9 +6306,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6291,26 +6339,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -6331,21 +6371,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( 
getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6359,7 +6402,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6374,6 +6418,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -6382,6 +6427,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6393,6 +6439,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -6401,6 +6448,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6412,6 +6460,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -6420,6 +6469,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6431,6 +6481,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not 
in r: r["doc"] = save( @@ -6439,6 +6490,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6450,6 +6502,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -6525,10 +6578,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InlineJavascriptRequirement): - return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) + return bool(self.class_ == other.class_ and self.expressionLib == other.expressionLib) return False def __hash__(self) -> int: @@ -6573,9 +6623,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6589,9 +6637,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'InlineJavascriptRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InlineJavascriptRequirement'", None, _errors__) _constructed = cls( expressionLib=expressionLib, extension_fields=extension_fields, @@ -6605,26 +6651,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = 
iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -6647,21 +6685,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6675,7 +6716,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6684,6 +6726,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6695,6 +6738,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -6786,16 +6830,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `types`".format(k), SourceLine(_doc, k, str), ) ) @@ -6816,26 +6856,18 @@ def save( base_url: str = 
"", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -6858,21 +6890,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6886,7 +6921,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.types is not None and "types" not in r: r["types"] = save( @@ -6895,6 +6931,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6906,6 +6943,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers 
to the directory level @@ -6941,9 +6979,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, SecondaryFileSchema): - return bool( - self.pattern == other.pattern and self.required == other.required - ) + return bool(self.pattern == other.pattern and self.required == other.required) return False def __hash__(self) -> int: @@ -6999,16 +7035,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `pattern`, `required`".format( - k - ), + "invalid field `{}`, expected one of: `pattern`, `required`".format(k), SourceLine(_doc, k, str), ) ) @@ -7030,26 +7062,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7070,21 +7094,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, 
relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7098,7 +7125,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.pattern is not None and "pattern" not in r: r["pattern"] = save( @@ -7107,6 +7135,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7118,6 +7147,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.required is not None and "required" not in r: r["required"] = save( @@ -7126,6 +7156,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7137,6 +7168,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7177,9 +7209,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, LoadListingRequirement): - return bool( - self.class_ == other.class_ and self.loadListing == other.loadListing - ) + return bool(self.class_ == other.class_ and self.loadListing == other.loadListing) return False def __hash__(self) -> int: @@ -7224,25 +7254,19 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - 
"invalid field `{}`, expected one of: `class`, `loadListing`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `loadListing`".format(k), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException( - "Trying 'LoadListingRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'LoadListingRequirement'", None, _errors__) _constructed = cls( loadListing=loadListing, extension_fields=extension_fields, @@ -7256,26 +7280,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7298,21 +7314,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7326,7 +7345,8 @@ def save( cols=cols, min_col=min_col, 
max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -7335,6 +7355,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7346,6 +7367,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7388,9 +7410,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, EnvironmentDef): - return bool( - self.envName == other.envName and self.envValue == other.envValue - ) + return bool(self.envName == other.envName and self.envValue == other.envValue) return False def __hash__(self) -> int: @@ -7443,16 +7463,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), + "invalid field `{}`, expected one of: `envName`, `envValue`".format(k), SourceLine(_doc, k, str), ) ) @@ -7474,26 +7490,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: 
- doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7514,21 +7522,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7542,7 +7553,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7551,6 +7563,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7562,6 +7575,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.envValue is not None and "envValue" not in r: r["envValue"] = save( @@ -7570,6 +7584,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7581,6 +7596,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7832,9 +7848,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, 
vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -7868,26 +7882,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7908,21 +7914,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7936,7 +7945,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -7945,6 +7955,7 @@ def save( base_url=base_url, relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7956,6 +7967,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.position is not None and "position" not in r: r["position"] = save( @@ -7964,6 +7976,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7975,6 +7988,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.prefix is not None and "prefix" not in r: r["prefix"] = save( @@ -7983,6 +7997,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7994,6 +8009,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.separate is not None and "separate" not in r: r["separate"] = save( @@ -8002,6 +8018,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8013,6 +8030,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.itemSeparator is not None and "itemSeparator" not in r: r["itemSeparator"] = save( @@ -8021,6 +8039,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8032,6 +8051,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -8040,6 +8060,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) 
max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8051,6 +8072,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.shellQuote is not None and "shellQuote" not in r: r["shellQuote"] = save( @@ -8059,6 +8081,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8070,6 +8093,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8233,9 +8257,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8266,26 +8288,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -8306,21 +8320,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, 
key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8334,7 +8351,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8343,6 +8361,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8354,6 +8373,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -8362,6 +8382,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8373,6 +8394,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.glob is not None and "glob" not in r: r["glob"] = save( @@ -8381,6 +8403,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8392,6 +8415,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputEval is not None and "outputEval" not in r: r["outputEval"] = save( @@ -8400,6 +8424,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, 
@@ -8411,6 +8436,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8485,16 +8511,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `inputBinding`".format( - k - ), + "invalid field `{}`, expected one of: `inputBinding`".format(k), SourceLine(_doc, k, str), ) ) @@ -8515,26 +8537,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -8555,21 +8569,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value 
in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8583,7 +8600,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8592,6 +8610,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8603,6 +8622,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8887,9 +8907,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8903,9 +8921,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordField'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputRecordField'", None, _errors__) _constructed = cls( doc=doc, name=name, @@ -8929,26 +8945,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) 
+ + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -8969,21 +8977,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8997,7 +9008,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9012,6 +9024,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9020,6 +9033,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9031,6 +9045,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9039,6 +9054,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9050,6 +9066,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not 
None and "label" not in r: r["label"] = save( @@ -9058,6 +9075,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9069,6 +9087,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -9077,6 +9096,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9088,6 +9108,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -9096,6 +9117,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9107,11 +9129,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9123,6 +9144,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -9131,6 +9153,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9142,6 +9165,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is 
not None and "loadListing" not in r: r["loadListing"] = save( @@ -9150,6 +9174,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9161,6 +9186,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9169,6 +9195,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9180,6 +9207,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -9206,9 +9234,7 @@ def save( ) -class CommandInputRecordSchema( - InputRecordSchema, CommandInputSchema, CommandLineBindable -): +class CommandInputRecordSchema(InputRecordSchema, CommandInputSchema, CommandLineBindable): def __init__( self, type: Any, @@ -9249,9 +9275,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) - ) + return hash((self.fields, self.type, self.label, self.doc, self.name, self.inputBinding)) @classmethod def fromDoc( @@ -9384,9 +9408,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9400,9 +9422,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputRecordSchema'", None, _errors__) _constructed = cls( fields=fields, type=type, @@ -9422,26 +9442,18 @@ def save( 
base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -9462,21 +9474,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9490,7 +9505,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9505,6 +9521,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -9513,6 +9530,7 @@ def save( base_url=str(self.name), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9524,6 +9542,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9532,6 +9551,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9543,6 +9563,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9551,6 +9572,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9562,6 +9584,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9570,6 +9593,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9581,6 +9605,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9589,6 +9614,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9600,6 +9626,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -9793,9 +9820,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, 
vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9809,9 +9834,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputEnumSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputEnumSchema'", None, _errors__) _constructed = cls( symbols=symbols, type=type, @@ -9831,26 +9854,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -9871,21 +9886,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9899,7 +9917,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9914,11 +9933,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9930,6 +9948,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9938,6 +9957,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9949,6 +9969,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9957,6 +9978,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9968,6 +9990,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9976,6 +9999,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9987,6 +10011,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9995,6 +10020,7 @@ def 
save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10006,6 +10032,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -10019,9 +10046,7 @@ def save( attrs = frozenset(["symbols", "type", "label", "doc", "name", "inputBinding"]) -class CommandInputArraySchema( - InputArraySchema, CommandInputSchema, CommandLineBindable -): +class CommandInputArraySchema(InputArraySchema, CommandInputSchema, CommandLineBindable): def __init__( self, items: Any, @@ -10062,9 +10087,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.items, self.type, self.label, self.doc, self.name, self.inputBinding) - ) + return hash((self.items, self.type, self.label, self.doc, self.name, self.inputBinding)) @classmethod def fromDoc( @@ -10194,9 +10217,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10210,9 +10231,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputArraySchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputArraySchema'", None, _errors__) _constructed = cls( items=items, type=type, @@ -10232,26 +10251,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if 
isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -10272,21 +10283,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10300,7 +10314,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10315,6 +10330,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -10323,6 +10339,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10334,6 +10351,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10342,6 +10360,7 @@ def save( base_url=str(self.name), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10353,6 +10372,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10361,6 +10381,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10372,6 +10393,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10380,6 +10402,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10391,6 +10414,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -10399,6 +10423,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10410,6 +10435,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -10650,9 +10676,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10666,9 +10690,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordField'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputRecordField'", None, 
_errors__) _constructed = cls( doc=doc, name=name, @@ -10690,26 +10712,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -10730,21 +10744,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10758,7 +10775,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10773,6 +10791,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in 
r: r["doc"] = save( @@ -10781,6 +10800,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10792,6 +10812,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10800,6 +10821,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10811,6 +10833,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10819,6 +10842,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10830,6 +10854,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -10838,6 +10863,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10849,6 +10875,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -10857,6 +10884,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10868,11 +10896,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri( 
- self.format, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10884,6 +10911,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -10892,6 +10920,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10903,6 +10932,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -11080,9 +11110,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11096,9 +11124,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputRecordSchema'", None, _errors__) _constructed = cls( fields=fields, type=type, @@ -11117,26 +11143,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - 
break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -11157,21 +11175,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11185,7 +11206,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11200,6 +11222,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -11208,6 +11231,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11219,6 +11243,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11227,6 +11252,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11238,6 +11264,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11246,6 +11273,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11257,6 +11285,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11265,6 +11294,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11276,6 +11306,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -11439,9 +11470,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11455,9 +11484,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputEnumSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputEnumSchema'", None, _errors__) _constructed = cls( symbols=symbols, type=type, @@ -11476,26 +11503,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if 
key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -11516,21 +11535,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11544,7 +11566,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11559,11 +11582,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11575,6 +11597,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11583,6 +11606,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = 
add_kv( old_doc=doc, @@ -11594,6 +11618,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11602,6 +11627,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11613,6 +11639,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11621,6 +11648,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11632,6 +11660,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -11795,9 +11824,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11811,9 +11838,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputArraySchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputArraySchema'", None, _errors__) _constructed = cls( items=items, type=type, @@ -11832,26 +11857,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = 
doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -11872,21 +11889,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11900,7 +11920,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11915,6 +11936,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -11923,6 +11945,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11934,6 +11957,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11942,6 +11966,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11953,6 +11978,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11961,6 +11987,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11972,6 +11999,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11980,6 +12008,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11991,6 +12020,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -12301,9 +12331,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12342,41 +12370,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = 
iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -12401,21 +12417,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -12429,7 +12448,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12444,6 +12464,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -12452,6 +12473,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12463,6 +12485,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -12471,6 +12494,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12482,6 +12506,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -12490,6 +12515,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12501,6 +12527,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -12509,6 +12536,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12520,6 +12548,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -12534,6 +12563,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -12542,6 +12572,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12553,6 +12584,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -12561,6 
+12593,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12572,6 +12605,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -12580,6 +12614,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12591,6 +12626,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -12599,6 +12635,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12610,6 +12647,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -12618,6 +12656,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12629,6 +12668,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -12887,9 +12927,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12903,9 +12941,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputParameter'", None, _errors__ - ) + raise 
ValidationException("Trying 'CommandOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -12927,41 +12963,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -12986,21 +13010,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = 
saved_val[0] r[key] = saved_val @@ -13014,7 +13041,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13029,6 +13057,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -13037,6 +13066,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13048,6 +13078,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -13056,6 +13087,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13067,6 +13099,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -13075,6 +13108,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13086,6 +13120,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -13094,6 +13129,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13105,6 +13141,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + 
shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -13119,6 +13156,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -13127,6 +13165,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13138,6 +13177,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -13146,6 +13186,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13157,6 +13198,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -13593,9 +13635,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13639,41 +13679,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - 
else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -13700,21 +13728,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13728,7 +13759,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13743,6 +13775,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -13751,6 +13784,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13762,6 +13796,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -13770,6 +13805,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13781,6 +13817,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -13789,6 +13826,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13800,6 +13838,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -13808,6 +13847,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13819,6 +13859,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -13827,6 +13868,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13838,6 +13880,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -13846,6 +13889,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13857,11 +13901,10 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13873,6 +13916,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.baseCommand is not None and "baseCommand" not in r: r["baseCommand"] = save( @@ -13881,6 +13925,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13892,6 +13937,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.arguments is not None and "arguments" not in r: r["arguments"] = save( @@ -13900,6 +13946,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13911,6 +13958,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.stdin is not None and "stdin" not in r: r["stdin"] = save( @@ -13919,6 +13967,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13930,6 +13979,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.stderr is not None and "stderr" not in r: r["stderr"] = save( @@ -13938,6 +13988,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13949,6 +14000,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.stdout is not None and "stdout" not in r: r["stdout"] = save( @@ -13957,6 +14009,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13968,6 +14021,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.successCodes is not None and "successCodes" not in r: r["successCodes"] = save( @@ -13976,6 +14030,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13987,6 +14042,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: r["temporaryFailCodes"] = save( @@ -13995,6 +14051,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14006,6 +14063,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.permanentFailCodes is not None and "permanentFailCodes" not in r: r["permanentFailCodes"] = save( @@ -14014,6 +14072,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14025,6 +14084,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14297,9 +14357,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) 
extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14332,26 +14390,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14374,21 +14424,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14402,7 +14455,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -14411,6 +14465,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ 
-14422,6 +14477,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerLoad is not None and "dockerLoad" not in r: r["dockerLoad"] = save( @@ -14430,6 +14486,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14441,6 +14498,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerFile is not None and "dockerFile" not in r: r["dockerFile"] = save( @@ -14449,6 +14507,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14460,6 +14519,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerImport is not None and "dockerImport" not in r: r["dockerImport"] = save( @@ -14468,6 +14528,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14479,6 +14540,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerImageId is not None and "dockerImageId" not in r: r["dockerImageId"] = save( @@ -14487,6 +14549,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14498,6 +14561,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: r["dockerOutputDirectory"] = save( @@ -14506,6 +14570,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = 
add_kv( old_doc=doc, @@ -14517,6 +14582,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14609,16 +14675,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `packages`".format(k), SourceLine(_doc, k, str), ) ) @@ -14639,26 +14701,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14681,21 +14735,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is 
a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14709,7 +14766,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -14718,6 +14776,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14729,6 +14788,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14844,9 +14904,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14876,26 +14934,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14916,21 +14966,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, 
key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14944,7 +14997,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.package is not None and "package" not in r: r["package"] = save( @@ -14953,6 +15007,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14964,6 +15019,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.version is not None and "version" not in r: r["version"] = save( @@ -14972,6 +15028,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14983,6 +15040,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.specs is not None and "specs" not in r: u = save_relative_uri(self.specs, base_url, False, None, relative_uris) @@ -14997,6 +15055,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15120,9 +15179,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = 
expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15152,26 +15209,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15192,21 +15241,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15220,7 +15272,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -15229,6 +15282,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + 
shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15240,6 +15294,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.entry is not None and "entry" not in r: r["entry"] = save( @@ -15248,6 +15303,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15259,6 +15315,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.writable is not None and "writable" not in r: r["writable"] = save( @@ -15267,6 +15324,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15278,6 +15336,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15358,25 +15417,19 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `listing`".format(k), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException( - "Trying 'InitialWorkDirRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InitialWorkDirRequirement'", None, _errors__) _constructed = cls( listing=listing, extension_fields=extension_fields, @@ -15390,26 +15443,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 
0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15432,21 +15477,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15460,7 +15508,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -15469,6 +15518,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15480,6 +15530,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15562,16 +15613,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, 
scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `envDef`".format(k), SourceLine(_doc, k, str), ) ) @@ -15592,26 +15639,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15634,21 +15673,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15662,7 +15704,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -15671,6 +15714,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15682,6 +15726,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15752,9 +15797,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15766,9 +15809,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ShellCommandRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'ShellCommandRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -15781,26 +15822,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15823,21 +15856,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if 
getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15851,7 +15887,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -16117,9 +16154,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16154,26 +16189,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -16196,21 +16223,24 @@ def save( if isinstance(key, str): if 
hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16224,7 +16254,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -16233,6 +16264,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16244,6 +16276,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.coresMax is not None and "coresMax" not in r: r["coresMax"] = save( @@ -16252,6 +16285,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16263,6 +16297,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.ramMin is not None and "ramMin" not in r: r["ramMin"] = save( @@ -16271,6 +16306,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16282,6 +16318,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.ramMax is not None and "ramMax" not 
in r: r["ramMax"] = save( @@ -16290,6 +16327,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16301,6 +16339,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.tmpdirMin is not None and "tmpdirMin" not in r: r["tmpdirMin"] = save( @@ -16309,6 +16348,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16320,6 +16360,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.tmpdirMax is not None and "tmpdirMax" not in r: r["tmpdirMax"] = save( @@ -16328,6 +16369,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16339,6 +16381,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outdirMin is not None and "outdirMin" not in r: r["outdirMin"] = save( @@ -16347,6 +16390,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16358,6 +16402,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outdirMax is not None and "outdirMax" not in r: r["outdirMax"] = save( @@ -16366,6 +16411,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16377,6 +16423,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -16436,9 +16483,7 @@ def __init__( def __eq__(self, 
other: Any) -> bool: if isinstance(other, WorkReuse): - return bool( - self.class_ == other.class_ and self.enableReuse == other.enableReuse - ) + return bool(self.class_ == other.class_ and self.enableReuse == other.enableReuse) return False def __hash__(self) -> int: @@ -16480,16 +16525,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `enableReuse`".format(k), SourceLine(_doc, k, str), ) ) @@ -16510,26 +16551,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -16552,21 +16585,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16580,7 +16616,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.enableReuse is not None and "enableReuse" not in r: r["enableReuse"] = save( @@ -16589,6 +16626,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16600,6 +16638,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -16653,10 +16692,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, NetworkAccess): - return bool( - self.class_ == other.class_ - and self.networkAccess == other.networkAccess - ) + return bool(self.class_ == other.class_ and self.networkAccess == other.networkAccess) return False def __hash__(self) -> int: @@ -16698,9 +16734,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16728,26 +16762,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if 
isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -16770,21 +16796,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16798,7 +16827,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.networkAccess is not None and "networkAccess" not in r: r["networkAccess"] = save( @@ -16807,6 +16837,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16818,6 +16849,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -16886,10 +16918,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InplaceUpdateRequirement): - return bool( - self.class_ == other.class_ - and self.inplaceUpdate == other.inplaceUpdate - ) + return bool(self.class_ == other.class_ and self.inplaceUpdate == other.inplaceUpdate) return False 
def __hash__(self) -> int: @@ -16931,9 +16960,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16947,9 +16974,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'InplaceUpdateRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InplaceUpdateRequirement'", None, _errors__) _constructed = cls( inplaceUpdate=inplaceUpdate, extension_fields=extension_fields, @@ -16963,26 +16988,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -17005,21 +17022,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the 
list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17033,7 +17053,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.inplaceUpdate is not None and "inplaceUpdate" not in r: r["inplaceUpdate"] = save( @@ -17042,6 +17063,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17053,6 +17075,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -17098,9 +17121,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, ToolTimeLimit): - return bool( - self.class_ == other.class_ and self.timelimit == other.timelimit - ) + return bool(self.class_ == other.class_ and self.timelimit == other.timelimit) return False def __hash__(self) -> int: @@ -17142,16 +17163,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `timelimit`".format(k), SourceLine(_doc, k, str), ) ) @@ -17172,26 +17189,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - 
for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -17214,21 +17223,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17242,7 +17254,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.timelimit is not None and "timelimit" not in r: r["timelimit"] = save( @@ -17251,6 +17264,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17262,6 +17276,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -17480,9 +17495,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ 
-17496,9 +17509,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ExpressionToolOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'ExpressionToolOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -17519,41 +17530,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -17578,21 +17577,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned 
value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17606,7 +17608,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17621,6 +17624,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -17629,6 +17633,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17640,6 +17645,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -17648,6 +17654,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17659,6 +17666,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -17667,6 +17675,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17678,6 +17687,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -17686,6 +17696,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) 
max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17697,6 +17708,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -17711,6 +17723,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -17719,6 +17732,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17730,6 +17744,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -17740,9 +17755,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] - ) + attrs = frozenset(["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"]) class WorkflowInputParameter(InputParameter): @@ -18038,9 +18051,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18054,9 +18065,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'WorkflowInputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'WorkflowInputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -18081,41 +18090,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) 
-> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -18140,21 +18137,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18168,7 +18168,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18183,6 +18184,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18191,6 +18193,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18202,6 +18205,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -18210,6 +18214,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18221,6 +18226,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -18229,6 +18235,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18240,6 +18247,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -18248,6 +18256,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18259,6 +18268,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -18273,6 +18283,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -18281,6 
+18292,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18292,6 +18304,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -18300,6 +18313,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18311,6 +18325,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -18319,6 +18334,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18330,6 +18346,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -18338,6 +18355,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18349,6 +18367,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -18357,6 +18376,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18368,6 +18388,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -18656,9 +18677,7 @@ def fromDoc( for k in _doc.keys(): if k not in 
cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18695,41 +18714,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -18756,21 +18763,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if 
type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18784,7 +18794,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18799,6 +18810,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18807,6 +18819,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18818,6 +18831,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -18826,6 +18840,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18837,6 +18852,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -18845,6 +18861,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18856,6 +18873,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -18864,6 +18882,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18875,6 +18894,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -18883,6 +18903,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18894,6 +18915,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -18902,6 +18924,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18913,11 +18936,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18929,6 +18951,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.expression is not None and "expression" not in r: r["expression"] = save( @@ -18937,6 +18960,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18948,6 +18972,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -19231,9 +19256,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) 
extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19247,9 +19270,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'WorkflowOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -19272,41 +19293,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -19331,21 +19340,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + 
inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19359,7 +19371,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19374,6 +19387,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -19382,6 +19396,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19393,6 +19408,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -19401,6 +19417,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19412,6 +19429,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -19420,6 +19438,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19431,6 +19450,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -19439,6 +19459,7 @@ def save( base_url=str(self.id), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19450,6 +19471,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -19464,11 +19486,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri( - self.outputSource, str(self.id), False, 1, relative_uris - ) + u = save_relative_uri(self.outputSource, str(self.id), False, 1, relative_uris) r["outputSource"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19480,6 +19501,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -19488,6 +19510,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19499,6 +19522,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -19507,6 +19531,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19518,6 +19543,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -19824,9 +19850,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, 
vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19862,41 +19886,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -19921,21 +19933,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19949,7 +19964,8 @@ def save( 
cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19964,6 +19980,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.source is not None and "source" not in r: u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) @@ -19978,6 +19995,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -19986,6 +20004,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19997,6 +20016,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -20005,6 +20025,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20016,6 +20037,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -20024,6 +20046,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20035,6 +20058,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -20043,6 +20067,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20054,6 +20079,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -20062,6 +20088,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20073,6 +20100,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -20081,6 +20109,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20092,6 +20121,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -20198,9 +20228,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -20227,41 +20255,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None 
- break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -20286,21 +20302,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -20314,7 +20333,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20329,6 +20349,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -20612,7 +20633,7 @@ def fromDoc( else: hints = None - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + subscope_baseuri = expand_url("run", baseuri, loadingOptions, True) try: run = load_field( _doc.get("run"), @@ -20668,9 +20689,7 @@ def fromDoc( for k in _doc.keys(): if k 
not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -20708,41 +20727,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -20767,21 +20774,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list 
if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -20795,7 +20805,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20810,6 +20821,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -20818,6 +20830,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20829,6 +20842,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -20837,6 +20851,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20848,6 +20863,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.in_ is not None and "in" not in r: r["in"] = save( @@ -20856,6 +20872,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20867,6 +20884,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.out is not None and "out" not in r: u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) @@ -20881,6 +20899,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: 
r["requirements"] = save( @@ -20889,6 +20908,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20900,6 +20920,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -20908,6 +20929,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20919,6 +20941,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.run is not None and "run" not in r: u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) @@ -20933,6 +20956,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.scatter is not None and "scatter" not in r: u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) @@ -20947,11 +20971,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri( - self.scatterMethod, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.scatterMethod, str(self.id), False, None, relative_uris) r["scatterMethod"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20963,6 +20986,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -21288,9 +21312,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: 
_errors__.append( @@ -21327,41 +21349,29 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -21388,21 +21398,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -21416,7 +21429,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21431,6 +21445,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -21439,6 +21454,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21450,6 +21466,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -21458,6 +21475,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21469,6 +21487,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -21477,6 +21496,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21488,6 +21508,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -21496,6 +21517,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21507,6 +21529,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -21515,6 +21538,7 @@ def save( 
base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21526,6 +21550,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -21534,6 +21559,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21545,11 +21571,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21561,6 +21586,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.steps is not None and "steps" not in r: r["steps"] = save( @@ -21569,6 +21595,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21580,6 +21607,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -21658,9 +21686,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21672,9 +21698,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'SubworkflowFeatureRequirement'", None, _errors__ - ) + raise 
ValidationException("Trying 'SubworkflowFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -21687,26 +21711,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -21729,21 +21745,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -21757,7 +21776,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -21823,9 +21843,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", 
loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21837,9 +21855,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ScatterFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'ScatterFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -21852,26 +21868,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -21894,21 +21902,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ 
-21922,7 +21933,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -21988,9 +22000,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22002,9 +22012,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'MultipleInputFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'MultipleInputFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22017,26 +22025,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -22059,21 +22059,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, 
relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -22087,7 +22090,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -22153,9 +22157,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22167,9 +22169,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'StepInputExpressionRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'StepInputExpressionRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22182,26 +22182,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, 
doc.lc.col) @@ -22224,21 +22216,24 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -22252,7 +22247,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -22887,16 +22883,14 @@ def save( array_of_union_of_FileLoader_or_DirectoryLoader, ) ) -secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = ( + _SecondaryDSLLoader(union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader) ) uri_union_of_None_type_or_strtype_True_False_None = _URILoader( union_of_None_type_or_strtype, True, False, None ) Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader( - Directory_classLoader, False, True, None -) +uri_Directory_classLoader_False_True_None = _URILoader(Directory_classLoader, False, True, None) union_of_None_type_or_booltype = _UnionLoader( ( None_type, @@ -22910,11 +22904,13 @@ def save( ) ) array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) 
-union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( - ( - None_type, - SecondaryFileSchemaLoader, - array_of_SecondaryFileSchemaLoader, +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = ( + _UnionLoader( + ( + None_type, + SecondaryFileSchemaLoader, + array_of_SecondaryFileSchemaLoader, + ) ) ) secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( @@ -22928,11 +22924,13 @@ def save( ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = ( + _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, + ) ) union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( ( @@ -23030,13 +23028,15 @@ def save( WorkflowInputParameterLoader, ) ) -array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( - _ArrayLoader(union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader) +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _ArrayLoader( + union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader ) -idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _IdMapLoader( - array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, - "id", - "type", +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( + _IdMapLoader( + array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, + "id", + "type", + ) ) union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = 
_UnionLoader( ( @@ -23240,10 +23240,8 @@ def save( array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" - ) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" ) union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -23284,10 +23282,8 @@ def save( array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" - ) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" ) union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -23339,13 +23335,15 @@ def save( CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader( + union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, 
+union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) ) ) array_of_inttype = _ArrayLoader(inttype) @@ -23355,15 +23353,11 @@ def save( array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader( - ("DockerRequirement",), "DockerRequirement_class" -) +DockerRequirement_classLoader = _EnumLoader(("DockerRequirement",), "DockerRequirement_class") uri_DockerRequirement_classLoader_False_True_None = _URILoader( DockerRequirement_classLoader, False, True, None ) -SoftwareRequirement_classLoader = _EnumLoader( - ("SoftwareRequirement",), "SoftwareRequirement_class" -) +SoftwareRequirement_classLoader = _EnumLoader(("SoftwareRequirement",), "SoftwareRequirement_class") uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( SoftwareRequirement_classLoader, False, True, None ) @@ -23399,9 +23393,7 @@ def save( ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader( - ("EnvVarRequirement",), "EnvVarRequirement_class" -) +EnvVarRequirement_classLoader = _EnumLoader(("EnvVarRequirement",), "EnvVarRequirement_class") uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( EnvVarRequirement_classLoader, False, True, None ) @@ -23415,16 +23407,12 @@ def save( uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( ShellCommandRequirement_classLoader, False, True, None ) -ResourceRequirement_classLoader = _EnumLoader( - ("ResourceRequirement",), "ResourceRequirement_class" -) +ResourceRequirement_classLoader = _EnumLoader(("ResourceRequirement",), "ResourceRequirement_class") uri_ResourceRequirement_classLoader_False_True_None = _URILoader( ResourceRequirement_classLoader, False, True, None ) WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") -uri_WorkReuse_classLoader_False_True_None = _URILoader( - WorkReuse_classLoader, False, True, None -) 
+uri_WorkReuse_classLoader_False_True_None = _URILoader(WorkReuse_classLoader, False, True, None) union_of_booltype_or_ExpressionLoader = _UnionLoader( ( booltype, @@ -23465,9 +23453,7 @@ def save( idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( array_of_WorkflowInputParameterLoader, "id", "type" ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( - ExpressionToolOutputParameterLoader -) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) @@ -23499,13 +23485,11 @@ def save( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( - _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, - ) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, ) array_of_Any_type = _ArrayLoader(Any_type) union_of_None_type_or_array_of_Any_type = _UnionLoader( @@ -23517,14 +23501,12 @@ def save( idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, ) ) uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( @@ -23546,9 +23528,7 
@@ def save( union_of_None_type_or_ScatterMethodLoader, False, True, None ) Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader( - Workflow_classLoader, False, True, None -) +uri_Workflow_classLoader_False_True_None = _URILoader(Workflow_classLoader, False, True, None) array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" @@ -23589,10 +23569,8 @@ def save( WorkflowLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader - ) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader ) union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_2.py b/schema_salad/tests/cwl_v1_2.py index c849c59bc..6e482dbd4 100644 --- a/schema_salad/tests/cwl_v1_2.py +++ b/schema_salad/tests/cwl_v1_2.py @@ -24,6 +24,7 @@ Type, Union, cast, + no_type_check, ) from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit from urllib.request import pathname2url @@ -47,6 +48,7 @@ doc_line_info = CommentedMap() + class LoadingOptions: idx: IdxType fileuri: Optional[str] @@ -206,6 +208,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -245,28 +249,41 @@ def add_kv( max_len: int, cols: Dict[int, int], min_col: int = 0, - inserted_line_info: Dict[int, int] = {} -) 
-> int: + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> Tuple[int, Optional[Dict[int, int]]]: """Add key value pair into Commented Map. Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, and max col value taken for each line. """ + if inserted_line_info is None: + inserted_line_info = {} + if len(inserted_line_info.keys()) >= 1: max_line = max(inserted_line_info.keys()) + 1 else: max_line = 0 - if ( - key in line_numbers - ): # If the key to insert is in the original CommentedMap as a key - line_info = old_doc.lc.data[key] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[key]) - inserted_line_info[old_doc.lc.data[key][0]] = old_doc.lc.data[key][1] - else: - line = line_info[0] - while line in inserted_line_info.keys(): + + if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] # Get the line information for the key + if ( + line_info[0] + shift not in inserted_line_info + ): # If the line of the key + shift isn't taken, add it + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[key][0] + shift, + old_doc.lc.data[key][1], + old_doc.lc.data[key][2] + shift, + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] + else: # If the line is already taken + line = line_info[0] + shift + while line in inserted_line_info.keys(): # Find the closest free line line += 1 new_doc.lc.add_kv_line_col( key, @@ -283,59 +300,88 @@ def add_kv( val, bool ): # If the value is hashable if val in line_numbers: # If the value is in the original CommentedMap - line = line_numbers[val]["line"] - if line in inserted_line_info: + line = line_numbers[val]["line"] + shift # Get the line info for the value + if line in inserted_line_info: # 
Get the appropriate line to place value on line = max_line - if line in cols: - col = max(line_numbers[val]["col"], cols[line]) - else: - col = line_numbers[val]["col"] + + col = line_numbers[val]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 return max_len, inserted_line_info - elif isinstance(val, str): + elif isinstance(val, str): # Logic for DSL expansition with "?" if val + "?" in line_numbers: - line = line_numbers[val + "?"]["line"] + line = line_numbers[val + "?"]["line"] + shift if line in inserted_line_info: line = max_line - if line in cols: - col = max(line_numbers[val + "?"]["col"], cols[line]) - else: - col = line_numbers[val + "?"]["col"] + col = line_numbers[val + "?"]["col"] new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) inserted_line_info[line] = col + len(key) + 2 - cols[line] = col + len("id") + 2 return max_len, inserted_line_info elif old_doc: if val in old_doc: index = old_doc.lc.data.index(val) line_info = old_doc.lc.data[index] - if line_info[0] not in inserted_line_info: - new_doc.lc.add_kv_line_col(key, old_doc.lc.data[index]) - inserted_line_info[old_doc.lc.data[index][0]] = old_doc.lc.data[ - index - ][1] + if line_info[0] + shift not in inserted_line_info: + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[index][0] + shift, + old_doc.lc.data[index][1], + old_doc.lc.data[index][2] + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ + 1 + ] else: new_doc.lc.add_kv_line_col( key, [ - max_line, + max_line + shift, old_doc.lc.data[index][1], - max_line + (max_line - old_doc.lc.data[index][2]), + max_line + (max_line - old_doc.lc.data[index][2]) + shift, old_doc.lc.data[index][3], ], ) - inserted_line_info[max_line] = old_doc.lc.data[index][1] - # If neither the key or value is in the original CommentedMap (or value is not 
hashable) - new_doc.lc.add_kv_line_col( - key, [max_line, min_col, max_line, min_col + len(key) + 2] - ) + inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) inserted_line_info[max_line] = min_col + len(key) + 2 return max_len + 1, inserted_line_info -def get_line_numbers(doc: CommentedMap) -> Dict[Any, Dict[str, int]]: +@no_type_check +def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + return None + else: + return None + if isinstance(doc, CommentedSeq): + to_return = CommentedMap() + for index, key in enumerate(doc): + to_return[key] = "" + to_return.lc.add_kv_line_col( + key, + [ + doc.lc.data[index][0], + doc.lc.data[index][1], + doc.lc.data[index][0], + doc.lc.data[index][1], + ], + ) + return to_return + return doc + + +def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: """Get line numbers for kv pairs in CommentedMap. For each key/value pair in a CommentedMap, save the line/col info into a dictionary, @@ -390,7 +436,8 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> save_type: """Save a val of any type. 
@@ -399,22 +446,17 @@ def save( """ if keys is None: keys = [] - doc = doc_line_info - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) if isinstance(val, Saveable): return val.save( - top=top, base_url=base_url, relative_uris=relative_uris, keys=keys, inserted_line_info=inserted_line_info + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, ) if isinstance(val, MutableSequence): r = CommentedSeq() @@ -432,7 +474,8 @@ def save( base_url=base_url, relative_uris=relative_uris, keys=new_keys, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift, ) ) return r @@ -454,6 +497,7 @@ def save( relative_uris=relative_uris, keys=new_keys, inserted_line_info=inserted_line_info, + shift=shift, ) return newdict @@ -1212,26 +1256,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1253,13 +1289,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift 
+ while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -1280,7 +1321,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1295,6 +1337,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -1303,6 +1346,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1314,6 +1358,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1322,6 +1367,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1333,6 +1379,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1455,26 +1502,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, 
CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1496,13 +1535,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -1523,7 +1567,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1532,6 +1577,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1543,6 +1589,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1551,6 +1598,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1562,6 +1610,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1721,26 +1770,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: 
Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1762,13 +1803,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -1789,7 +1835,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1804,6 +1851,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( @@ -1820,6 +1868,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -1828,6 +1877,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1839,6 
+1889,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1958,26 +2009,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -1999,13 +2042,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -2026,7 +2074,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) @@ -2041,6 +2090,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -2049,6 +2099,7 @@ def save( base_url=base_url, 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2060,6 +2111,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2477,26 +2529,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -2520,13 +2564,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -2547,7 +2596,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2562,6 +2612,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) @@ -2576,6 +2627,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.basename is not None and "basename" not in r: r["basename"] = save( @@ -2584,6 +2636,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2595,6 +2648,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dirname is not None and "dirname" not in r: r["dirname"] = save( @@ -2603,6 +2657,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2614,6 +2669,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.nameroot is not None and "nameroot" not in r: r["nameroot"] = save( @@ -2622,6 +2678,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2633,6 +2690,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.nameext is not None and "nameext" not in r: r["nameext"] = save( @@ -2641,6 +2699,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2652,6 +2711,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.checksum is not None and "checksum" not in r: r["checksum"] = save( @@ -2660,6 +2720,7 @@ def save( base_url=base_url, relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2671,6 +2732,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.size is not None and "size" not in r: r["size"] = save( @@ -2679,6 +2741,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2690,6 +2753,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -2698,6 +2762,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2709,6 +2774,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, base_url, True, None, relative_uris) @@ -2723,6 +2789,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.contents is not None and "contents" not in r: r["contents"] = save( @@ -2731,6 +2798,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -2742,6 +2810,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2984,26 +3053,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) 
keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -3027,13 +3088,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -3054,7 +3120,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3069,6 +3136,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.path is not None and "path" not in r: u = save_relative_uri(self.path, base_url, False, None, relative_uris) @@ -3083,6 +3151,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.basename is not None and "basename" not in r: r["basename"] = save( @@ -3091,6 +3160,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3102,6 +3172,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -3110,6 +3181,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3121,6 +3193,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3258,26 +3331,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -3299,13 +3364,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -3326,7 +3396,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.loadContents is not None and 
"loadContents" not in r: r["loadContents"] = save( @@ -3335,6 +3406,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3346,6 +3418,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3659,26 +3732,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -3700,13 +3765,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -3727,7 +3797,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3742,6 
+3813,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -3750,6 +3822,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3761,6 +3834,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -3769,6 +3843,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3780,6 +3855,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -3788,6 +3864,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3799,6 +3876,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -3807,6 +3885,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3818,6 +3897,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -3826,6 +3906,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3837,6 +3918,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri( @@ -3853,6 +3935,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3861,6 +3944,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3872,6 +3956,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -3880,6 +3965,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3891,6 +3977,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4104,26 +4191,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4145,13 +4224,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 
'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -4172,7 +4256,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4187,6 +4272,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -4195,6 +4281,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4206,6 +4293,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4214,6 +4302,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4225,6 +4314,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4233,6 +4323,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4244,6 +4335,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: 
r["doc"] = save( @@ -4252,6 +4344,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4263,6 +4356,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4461,26 +4555,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4502,13 +4588,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -4529,7 +4620,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4544,6 +4636,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( @@ -4560,6 +4653,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4568,6 +4662,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4579,6 +4674,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4587,6 +4683,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4598,6 +4695,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4606,6 +4704,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4617,6 +4716,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4815,26 +4915,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and 
isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -4856,13 +4948,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -4883,7 +4980,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4898,6 +4996,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) @@ -4912,6 +5011,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -4920,6 +5020,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4931,6 +5032,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -4939,6 +5041,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, 
+ shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4950,6 +5053,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -4958,6 +5062,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4969,6 +5074,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5224,26 +5330,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -5265,13 +5363,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -5292,7 +5395,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5307,6 +5411,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5315,6 +5420,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5326,6 +5432,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5334,6 +5441,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5345,6 +5453,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5353,6 +5462,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5364,6 +5474,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -5372,6 +5483,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5383,6 +5495,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -5391,6 +5504,7 @@ def save( 
base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5402,6 +5516,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri( @@ -5418,6 +5533,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5621,26 +5737,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -5662,13 +5770,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -5689,7 +5802,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if 
self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5704,6 +5818,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -5712,6 +5827,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5723,6 +5839,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -5731,6 +5848,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5742,6 +5860,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -5750,6 +5869,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5761,6 +5881,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -5769,6 +5890,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5780,6 +5902,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -5978,26 +6101,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = 
None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -6019,13 +6134,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -6046,7 +6166,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6061,6 +6182,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( @@ -6077,6 +6199,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -6085,6 +6208,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6096,6 +6220,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -6104,6 +6229,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6115,6 +6241,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -6123,6 +6250,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6134,6 +6262,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -6332,26 +6461,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -6373,13 +6494,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, 
keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -6400,7 +6526,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6415,6 +6542,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) @@ -6429,6 +6557,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -6437,6 +6566,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6448,6 +6578,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -6456,6 +6587,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6467,6 +6599,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -6475,6 +6608,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6486,6 +6620,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers 
to the directory level @@ -6641,26 +6776,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -6684,13 +6811,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -6711,7 +6843,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6720,6 +6853,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6731,6 +6865,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -6857,26 +6992,18 @@ def save( base_url: str = "", 
relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -6900,13 +7027,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -6927,7 +7059,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.types is not None and "types" not in r: r["types"] = save( @@ -6936,6 +7069,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6947,6 +7081,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7088,26 +7223,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + 
inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7129,13 +7256,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -7156,7 +7288,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.pattern is not None and "pattern" not in r: r["pattern"] = save( @@ -7165,6 +7298,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7176,6 +7310,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.required is not None and "required" not in r: r["required"] = save( @@ -7184,6 +7319,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7195,6 +7331,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7314,26 +7451,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7357,13 +7486,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -7384,7 +7518,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -7393,6 +7528,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7404,6 +7540,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # 
top refers to the directory level @@ -7532,26 +7669,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7573,13 +7702,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -7600,7 +7734,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7609,6 +7744,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7620,6 +7756,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.envValue is not None and "envValue" not in r: r["envValue"] = save( @@ -7628,6 +7765,7 @@ def 
save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -7639,6 +7777,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -7926,26 +8065,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -7967,13 +8098,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -7994,7 +8130,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8003,6 +8140,7 @@ def save( base_url=base_url, relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8014,6 +8152,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.position is not None and "position" not in r: r["position"] = save( @@ -8022,6 +8161,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8033,6 +8173,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.prefix is not None and "prefix" not in r: r["prefix"] = save( @@ -8041,6 +8182,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8052,6 +8194,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.separate is not None and "separate" not in r: r["separate"] = save( @@ -8060,6 +8203,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8071,6 +8215,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.itemSeparator is not None and "itemSeparator" not in r: r["itemSeparator"] = save( @@ -8079,6 +8224,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8090,6 +8236,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -8098,6 +8245,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) 
max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8109,6 +8257,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.shellQuote is not None and "shellQuote" not in r: r["shellQuote"] = save( @@ -8117,6 +8266,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8128,6 +8278,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8324,26 +8475,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -8365,13 +8508,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -8392,7 +8540,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8401,6 +8550,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8412,6 +8562,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -8420,6 +8571,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8431,6 +8583,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.glob is not None and "glob" not in r: r["glob"] = save( @@ -8439,6 +8592,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8450,6 +8604,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputEval is not None and "outputEval" not in r: r["outputEval"] = save( @@ -8458,6 +8613,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8469,6 +8625,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8573,26 +8730,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = 
copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -8614,13 +8763,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -8641,7 +8795,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8650,6 +8805,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -8661,6 +8817,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -8987,26 +9144,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = 
copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -9028,13 +9177,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -9055,7 +9209,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9070,6 +9225,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9078,6 +9234,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9089,6 +9246,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9097,6 +9255,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9108,6 +9267,7 @@ def 
save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9116,6 +9276,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9127,6 +9288,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -9135,6 +9297,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9146,6 +9309,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -9154,6 +9318,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9165,6 +9330,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri( @@ -9181,6 +9347,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -9189,6 +9356,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9200,6 +9368,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -9208,6 +9377,7 @@ def save( 
base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9219,6 +9389,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9227,6 +9398,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9238,6 +9410,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -9480,26 +9653,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -9521,13 +9686,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just 
save the value in the list @@ -9548,7 +9718,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9563,6 +9734,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -9571,6 +9743,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9582,6 +9755,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9590,6 +9764,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9601,6 +9776,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -9609,6 +9785,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9620,6 +9797,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -9628,6 +9806,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9639,6 +9818,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and 
"inputBinding" not in r: r["inputBinding"] = save( @@ -9647,6 +9827,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9658,6 +9839,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -9889,26 +10071,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -9930,13 +10104,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -9957,7 +10136,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) 
@@ -9972,6 +10152,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( @@ -9988,6 +10169,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -9996,6 +10178,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10007,6 +10190,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10015,6 +10199,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10026,6 +10211,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10034,6 +10220,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10045,6 +10232,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -10053,6 +10241,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10064,6 +10253,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -10290,26 +10480,18 @@ def save( base_url: str = "", relative_uris: bool = True, 
keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -10331,13 +10513,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -10358,7 +10545,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10373,6 +10561,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) @@ -10387,6 +10576,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10395,6 +10585,7 @@ def save( 
base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10406,6 +10597,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -10414,6 +10606,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10425,6 +10618,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10433,6 +10627,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10444,6 +10639,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -10452,6 +10648,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10463,6 +10660,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -10743,26 +10941,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif 
isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -10784,13 +10974,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -10811,7 +11006,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10826,6 +11022,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -10834,6 +11031,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10845,6 +11043,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -10853,6 +11052,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10864,6 +11064,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if 
self.label is not None and "label" not in r: r["label"] = save( @@ -10872,6 +11073,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10883,6 +11085,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -10891,6 +11094,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10902,6 +11106,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -10910,6 +11115,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10921,6 +11127,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri( @@ -10937,6 +11144,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -10945,6 +11153,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10956,6 +11165,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -11170,26 +11380,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} 
+ inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -11211,13 +11413,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -11238,7 +11445,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11253,6 +11461,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -11261,6 +11470,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11272,6 +11482,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: 
r["type"] = save( @@ -11280,6 +11491,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11291,6 +11503,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11299,6 +11512,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11310,6 +11524,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11318,6 +11533,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11329,6 +11545,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -11529,26 +11746,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -11570,13 +11779,18 @@ def save( if hasattr(self, 
key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -11597,7 +11811,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11612,6 +11827,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri( @@ -11628,6 +11844,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11636,6 +11853,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11647,6 +11865,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -11655,6 +11874,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11666,6 +11886,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -11674,6 +11895,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11685,6 +11907,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -11885,26 +12108,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -11926,13 +12141,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -11953,7 +12173,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11968,6 +12189,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if 
self.items is not None and "items" not in r: u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) @@ -11982,6 +12204,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -11990,6 +12213,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12001,6 +12225,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -12009,6 +12234,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12020,6 +12246,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -12028,6 +12255,7 @@ def save( base_url=str(self.name), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12039,6 +12267,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -12390,26 +12619,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < 
len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -12420,11 +12642,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -12450,13 +12667,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -12477,7 +12699,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12492,6 +12715,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -12500,6 +12724,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12511,6 +12736,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -12519,6 +12745,7 @@ def save( base_url=str(self.id), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12530,6 +12757,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -12538,6 +12766,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12549,6 +12778,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -12557,6 +12787,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12568,6 +12799,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -12582,6 +12814,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -12590,6 +12823,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12601,6 +12835,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -12609,6 +12844,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12620,6 +12856,7 @@ def 
save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -12628,6 +12865,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12639,6 +12877,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -12647,6 +12886,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12658,6 +12898,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -12666,6 +12907,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12677,6 +12919,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -12975,26 +13218,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if 
inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -13005,11 +13241,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -13035,13 +13266,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -13062,7 +13298,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13077,6 +13314,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -13085,6 +13323,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13096,6 +13335,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -13104,6 +13344,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ 
-13115,6 +13356,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -13123,6 +13365,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13134,6 +13377,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -13142,6 +13386,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13153,6 +13398,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -13167,6 +13413,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -13175,6 +13422,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13186,6 +13434,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputBinding is not None and "outputBinding" not in r: r["outputBinding"] = save( @@ -13194,6 +13443,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13205,6 +13455,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -13710,26 +13961,19 @@ 
def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -13740,11 +13984,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -13772,13 +14011,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -13799,7 +14043,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13814,6 +14059,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if 
self.label is not None and "label" not in r: r["label"] = save( @@ -13822,6 +14068,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13833,6 +14080,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -13841,6 +14089,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13852,6 +14101,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -13860,6 +14110,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13871,6 +14122,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -13879,6 +14131,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13890,6 +14143,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -13898,6 +14152,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13909,6 +14164,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] 
= save( @@ -13917,6 +14173,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13928,6 +14185,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( @@ -13944,6 +14202,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.intent is not None and "intent" not in r: u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) @@ -13958,6 +14217,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.baseCommand is not None and "baseCommand" not in r: r["baseCommand"] = save( @@ -13966,6 +14226,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13977,6 +14238,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.arguments is not None and "arguments" not in r: r["arguments"] = save( @@ -13985,6 +14247,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13996,6 +14259,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.stdin is not None and "stdin" not in r: r["stdin"] = save( @@ -14004,6 +14268,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14015,6 +14280,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.stderr is not None and 
"stderr" not in r: r["stderr"] = save( @@ -14023,6 +14289,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14034,6 +14301,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.stdout is not None and "stdout" not in r: r["stdout"] = save( @@ -14042,6 +14310,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14053,6 +14322,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.successCodes is not None and "successCodes" not in r: r["successCodes"] = save( @@ -14061,6 +14331,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14072,6 +14343,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: r["temporaryFailCodes"] = save( @@ -14080,6 +14352,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14091,6 +14364,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.permanentFailCodes is not None and "permanentFailCodes" not in r: r["permanentFailCodes"] = save( @@ -14099,6 +14373,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14110,6 +14385,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top 
refers to the directory level @@ -14418,26 +14694,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14461,13 +14729,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -14488,7 +14761,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -14497,6 +14771,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14508,6 +14783,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerLoad is not None and "dockerLoad" not in r: r["dockerLoad"] = save( @@ 
-14516,6 +14792,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14527,6 +14804,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerFile is not None and "dockerFile" not in r: r["dockerFile"] = save( @@ -14535,6 +14813,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14546,6 +14825,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerImport is not None and "dockerImport" not in r: r["dockerImport"] = save( @@ -14554,6 +14834,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14565,6 +14846,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerImageId is not None and "dockerImageId" not in r: r["dockerImageId"] = save( @@ -14573,6 +14855,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14584,6 +14867,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: r["dockerOutputDirectory"] = save( @@ -14592,6 +14876,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14603,6 +14888,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14725,26 +15011,18 @@ 
def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -14768,13 +15046,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -14795,7 +15078,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -14804,6 +15088,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14815,6 +15100,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -14962,26 +15248,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, 
- inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15003,13 +15281,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -15030,7 +15313,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.package is not None and "package" not in r: r["package"] = save( @@ -15039,6 +15323,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15050,6 +15335,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.version is not None and "version" not in r: r["version"] = save( @@ -15058,6 +15344,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( 
old_doc=doc, @@ -15069,6 +15356,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.specs is not None and "specs" not in r: u = save_relative_uri(self.specs, base_url, False, None, relative_uris) @@ -15083,6 +15371,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15242,26 +15531,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15283,13 +15564,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -15310,7 +15596,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -15319,6 +15606,7 
@@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15330,6 +15618,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.entry is not None and "entry" not in r: r["entry"] = save( @@ -15338,6 +15627,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15349,6 +15639,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.writable is not None and "writable" not in r: r["writable"] = save( @@ -15357,6 +15648,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15368,6 +15660,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15481,26 +15774,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15524,13 +15809,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if 
key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -15551,7 +15841,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -15560,6 +15851,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15571,6 +15863,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15683,26 +15976,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15726,13 +16011,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + 
line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -15753,7 +16043,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -15762,6 +16053,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -15773,6 +16065,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -15872,26 +16165,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -15915,13 +16200,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, 
relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -15942,7 +16232,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # top refers to the directory level @@ -16250,26 +16541,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -16293,13 +16576,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -16320,7 +16608,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ 
-16329,6 +16618,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16340,6 +16630,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.coresMax is not None and "coresMax" not in r: r["coresMax"] = save( @@ -16348,6 +16639,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16359,6 +16651,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.ramMin is not None and "ramMin" not in r: r["ramMin"] = save( @@ -16367,6 +16660,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16378,6 +16672,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.ramMax is not None and "ramMax" not in r: r["ramMax"] = save( @@ -16386,6 +16681,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16397,6 +16693,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.tmpdirMin is not None and "tmpdirMin" not in r: r["tmpdirMin"] = save( @@ -16405,6 +16702,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16416,6 +16714,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.tmpdirMax is not None and "tmpdirMax" not in r: r["tmpdirMax"] = save( @@ -16424,6 +16723,7 @@ def save( base_url=base_url, 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16435,6 +16735,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outdirMin is not None and "outdirMin" not in r: r["outdirMin"] = save( @@ -16443,6 +16744,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16454,6 +16756,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outdirMax is not None and "outdirMax" not in r: r["outdirMax"] = save( @@ -16462,6 +16765,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16473,6 +16777,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -16606,26 +16911,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -16649,13 +16946,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': 
+ line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -16676,7 +16978,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.enableReuse is not None and "enableReuse" not in r: r["enableReuse"] = save( @@ -16685,6 +16988,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16696,6 +17000,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -16824,26 +17129,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -16867,13 +17164,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 
1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -16894,7 +17196,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.networkAccess is not None and "networkAccess" not in r: r["networkAccess"] = save( @@ -16903,6 +17206,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16914,6 +17218,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -17059,26 +17364,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -17102,13 +17399,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, 
base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -17129,7 +17431,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.inplaceUpdate is not None and "inplaceUpdate" not in r: r["inplaceUpdate"] = save( @@ -17138,6 +17441,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17149,6 +17453,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -17268,26 +17573,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -17311,13 +17608,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - 
inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -17338,7 +17640,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.timelimit is not None and "timelimit" not in r: r["timelimit"] = save( @@ -17347,6 +17650,7 @@ def save( base_url=base_url, relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17358,6 +17662,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -17615,26 +17920,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -17645,11 +17943,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -17675,13 +17968,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = 
doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -17702,7 +18000,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17717,6 +18016,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -17725,6 +18025,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17736,6 +18037,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -17744,6 +18046,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17755,6 +18058,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -17763,6 +18067,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17774,6 +18079,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if 
self.doc is not None and "doc" not in r: r["doc"] = save( @@ -17782,6 +18088,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17793,6 +18100,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -17807,6 +18115,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -17815,6 +18124,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17826,6 +18136,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -18177,26 +18488,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -18207,11 +18511,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", 
[doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -18237,13 +18536,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -18264,7 +18568,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18279,6 +18584,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18287,6 +18593,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18298,6 +18605,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -18306,6 +18614,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18317,6 +18626,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -18325,6 +18635,7 @@ def save( base_url=str(self.id), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18336,6 +18647,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -18344,6 +18656,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18355,6 +18668,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -18369,6 +18683,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -18377,6 +18692,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18388,6 +18704,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -18396,6 +18713,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18407,6 +18725,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -18415,6 +18734,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18426,6 +18746,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -18434,6 +18755,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18445,6 +18767,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -18453,6 +18776,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18464,6 +18788,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -18814,26 +19139,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -18844,11 +19162,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is 
not None: @@ -18876,13 +19189,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -18903,7 +19221,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18918,6 +19237,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -18926,6 +19246,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18937,6 +19258,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -18945,6 +19267,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18956,6 +19279,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -18964,6 +19288,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18975,6 +19300,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -18983,6 +19309,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18994,6 +19321,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -19002,6 +19330,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19013,6 +19342,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -19021,6 +19351,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19032,6 +19363,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( @@ -19048,6 +19380,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.intent is not None and "intent" not in r: u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) @@ -19062,6 +19395,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.expression is not None and "expression" not in r: r["expression"] = save( @@ -19070,6 +19404,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, 
@@ -19081,6 +19416,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -19432,26 +19768,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -19462,11 +19791,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -19492,13 +19816,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -19519,7 +19848,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = 
save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19534,6 +19864,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -19542,6 +19873,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19553,6 +19885,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -19561,6 +19894,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19572,6 +19906,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -19580,6 +19915,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19591,6 +19927,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -19599,6 +19936,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19610,6 +19948,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -19624,6 +19963,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + 
shift=shift, ) if self.outputSource is not None and "outputSource" not in r: u = save_relative_uri( @@ -19640,6 +19980,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -19648,6 +19989,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19659,6 +20001,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.pickValue is not None and "pickValue" not in r: r["pickValue"] = save( @@ -19667,6 +20010,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19678,6 +20022,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -19686,6 +20031,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19697,6 +20043,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -20130,26 +20477,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key 
< len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -20160,11 +20500,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -20190,13 +20525,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -20217,7 +20557,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20232,6 +20573,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.source is not None and "source" not in r: u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) @@ -20246,6 +20588,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.linkMerge is not None and "linkMerge" not in r: r["linkMerge"] = save( @@ -20254,6 +20597,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20265,6 +20609,7 @@ def 
save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.pickValue is not None and "pickValue" not in r: r["pickValue"] = save( @@ -20273,6 +20618,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20284,6 +20630,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -20292,6 +20639,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20303,6 +20651,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -20311,6 +20660,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20322,6 +20672,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -20330,6 +20681,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20341,6 +20693,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -20349,6 +20702,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20360,6 +20714,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.valueFrom is not None and "valueFrom" not in r: r["valueFrom"] = save( @@ -20368,6 +20723,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20379,6 +20735,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -20515,26 +20872,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -20545,11 +20895,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -20575,13 +20920,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + 
inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -20602,7 +20952,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20617,6 +20968,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -21043,26 +21395,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -21073,11 +21418,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -21103,13 +21443,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys 
+ [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -21130,7 +21475,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21145,6 +21491,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -21153,6 +21500,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21164,6 +21512,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -21172,6 +21521,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21183,6 +21533,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.in_ is not None and "in" not in r: r["in"] = save( @@ -21191,6 +21542,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21202,6 +21554,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.out is not None and "out" not in r: u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) @@ -21216,6 +21569,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not 
None and "requirements" not in r: r["requirements"] = save( @@ -21224,6 +21578,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21235,6 +21590,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -21243,6 +21599,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21254,6 +21611,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.run is not None and "run" not in r: u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) @@ -21268,6 +21626,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.when is not None and "when" not in r: r["when"] = save( @@ -21276,6 +21635,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21287,6 +21647,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.scatter is not None and "scatter" not in r: u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) @@ -21301,6 +21662,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: u = save_relative_uri( @@ -21317,6 +21679,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -21711,26 +22074,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, 
- inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -21741,11 +22097,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -21773,13 +22124,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -21800,7 +22156,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21815,6 +22172,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -21823,6 +22181,7 @@ def save( 
base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21834,6 +22193,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -21842,6 +22202,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21853,6 +22214,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -21861,6 +22223,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21872,6 +22235,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -21880,6 +22244,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21891,6 +22256,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -21899,6 +22265,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21910,6 +22277,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -21918,6 +22286,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, 
inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21929,6 +22298,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( @@ -21945,6 +22315,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.intent is not None and "intent" not in r: u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) @@ -21959,6 +22330,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.steps is not None and "steps" not in r: r["steps"] = save( @@ -21967,6 +22339,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21978,6 +22351,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -22086,26 +22460,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -22129,13 +22495,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not 
None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -22156,7 +22527,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # top refers to the directory level @@ -22251,26 +22623,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -22294,13 +22658,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -22321,7 +22690,8 @@ def save( cols=cols, min_col=min_col, 
max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # top refers to the directory level @@ -22416,26 +22786,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -22459,13 +22821,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -22486,7 +22853,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # top refers to the directory level @@ -22581,26 +22949,18 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) 
keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc is not None: r._yaml_set_line_col(doc.lc.line, doc.lc.col) @@ -22624,13 +22984,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -22651,7 +23016,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # top refers to the directory level @@ -22983,26 +23349,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -23013,11 +23372,6 @@ def save( 
keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -23043,13 +23397,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -23070,7 +23429,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -23085,6 +23445,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -23093,6 +23454,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23104,6 +23466,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -23112,6 +23475,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23123,6 +23487,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + 
shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -23131,6 +23496,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23142,6 +23508,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -23150,6 +23517,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23161,6 +23529,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -23175,6 +23544,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -23183,6 +23553,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23194,6 +23565,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -23202,6 +23574,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23213,6 +23586,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.default is not None and "default" not in r: r["default"] = save( @@ -23221,6 +23595,7 @@ def save( base_url=str(self.id), 
relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23232,6 +23607,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -23240,6 +23616,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23251,6 +23628,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -23526,26 +23904,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -23556,11 +23927,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -23586,13 +23952,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = 
save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -23613,7 +23984,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -23628,6 +24000,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -23636,6 +24009,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23647,6 +24021,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.secondaryFiles is not None and "secondaryFiles" not in r: r["secondaryFiles"] = save( @@ -23655,6 +24030,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23666,6 +24042,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.streamable is not None and "streamable" not in r: r["streamable"] = save( @@ -23674,6 +24051,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23685,6 +24063,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -23693,6 +24072,7 @@ def save( 
base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23704,6 +24084,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.format is not None and "format" not in r: u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) @@ -23718,6 +24099,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.type is not None and "type" not in r: r["type"] = save( @@ -23726,6 +24108,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -23737,6 +24120,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -24055,26 +24439,19 @@ def save( base_url: str = "", relative_uris: bool = True, keys: Optional[List[Any]] = None, - inserted_line_info: Dict[int, int] = {} + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] r = CommentedMap() - doc = copy.copy(doc_line_info) + keys = copy.copy(keys) - inserted_line_info = copy.copy(inserted_line_info) - - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - doc = None - else: - doc = None - break + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} if doc: if self.id: @@ -24085,11 +24462,6 @@ def save( keys.append(temp_id) temp_doc = doc.get(temp_id) if isinstance(temp_doc, CommentedMap): - temp_doc['id'] = temp_id - temp_doc.lc.add_kv_line_col("id", [doc.lc.data[temp_id][0], - doc.lc.data[temp_id][1], - doc.lc.data[temp_id][0], - 
doc.lc.data[temp_id][1] + 4]) doc = temp_doc if doc is not None: @@ -24117,13 +24489,18 @@ def save( if hasattr(self, key): if getattr(self, key) is not None: if key != 'class': + line = doc.lc.data[key][0] + shift + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, base_url=base_url_to_save, relative_uris=relative_uris, keys=keys + [key], - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list @@ -24144,7 +24521,8 @@ def save( cols=cols, min_col=min_col, max_len=max_len, - inserted_line_info=inserted_line_info + inserted_line_info=inserted_line_info, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -24159,6 +24537,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.label is not None and "label" not in r: r["label"] = save( @@ -24167,6 +24546,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -24178,6 +24558,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.doc is not None and "doc" not in r: r["doc"] = save( @@ -24186,6 +24567,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -24197,6 +24579,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.inputs is not None and "inputs" not in r: r["inputs"] = save( @@ -24205,6 +24588,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( 
old_doc=doc, @@ -24216,6 +24600,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.outputs is not None and "outputs" not in r: r["outputs"] = save( @@ -24224,6 +24609,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -24235,6 +24621,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.requirements is not None and "requirements" not in r: r["requirements"] = save( @@ -24243,6 +24630,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -24254,6 +24642,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.hints is not None and "hints" not in r: r["hints"] = save( @@ -24262,6 +24651,7 @@ def save( base_url=str(self.id), relative_uris=relative_uris, inserted_line_info=inserted_line_info, + shift=shift, ) max_len, inserted_line_info = add_kv( old_doc=doc, @@ -24273,6 +24663,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: u = save_relative_uri( @@ -24289,6 +24680,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) if self.intent is not None and "intent" not in r: u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) @@ -24303,6 +24695,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index fe7008f7b..30ca0dd10 100644 --- a/schema_salad/tests/test_line_numbers.py +++ 
b/schema_salad/tests/test_line_numbers.py @@ -24,29 +24,28 @@ def test_secondary_files_dsl() -> None: "cwlVersion": [1, 0, 1, 12], "baseCommand": [2, 0, 2, 13], "inputs": [4, 0, 5, 2], - "outputs": [10, 0, 11, 2], - "stdout": [19, 0, 21, 8], - "id": [20, 0, 20, 4], + "outputs": [15, 0, 16, 2], + "stdout": [25, 0, 25, 8], + "id": [26, 0, 26, 4], } assert saved_obj["inputs"][0].lc.data == { "type": [6, 3, 6, 9], - "default": [7, 3, 7, 12], - "id": [5, 2, 5, 6], + "secondaryFiles": [10, 3, 13, 19], + "default": [11, 3, 11, 12], + "id": [12, 3, 12, 7], } assert saved_obj["inputs"][0]["type"] == "File" - assert saved_obj["inputs"][1].lc.data == {"id": [8, 2, 8, 6], "type": [9, 2, 9, 8]} + assert saved_obj["inputs"][1].lc.data == {"id": [13, 2, 13, 6], "type": [14, 2, 14, 8]} assert saved_obj["outputs"][0].lc.data == { - "type": [12, 4, 12, 10], - "secondaryFiles": [16, 4, 19, 20], - "outputBinding": [18, 4, 21, 6], - "id": [11, 2, 11, 6], - } - assert saved_obj["outputs"][0]["secondaryFiles"][0].lc.data == { - "pattern": [13, 35, 13, 44] + "type": [17, 4, 17, 10], + "secondaryFiles": [21, 4, 28, 20], + "outputBinding": [22, 4, 23, 6], + "id": [24, 4, 24, 8], } + assert saved_obj["outputs"][0]["secondaryFiles"][0].lc.data == {"pattern": [18, 21, 18, 30]} assert saved_obj["outputs"][0]["secondaryFiles"][1].lc.data == { - "pattern": [14, 35, 14, 44], - "required": [15, 35, 15, 45], + "pattern": [19, 35, 19, 44], + "required": [20, 35, 20, 45], } @@ -59,28 +58,25 @@ def test_outputs_before_inputs() -> None: obj = load_document_by_uri(str(path)) saved_obj = obj.save() assert isinstance(saved_obj, CommentedMap) - assert saved_obj.lc.data == { + assert { "cwlVersion": [1, 0, 1, 12], "baseCommand": [2, 0, 2, 13], "outputs": [4, 0, 5, 2], "inputs": [10, 0, 11, 2], - "stdout": [16, 0, 16, 8], - "id": [17, 0, 17, 4], + "stdout": [17, 0, 17, 8], + "id": [18, 0, 18, 4], } assert saved_obj["inputs"][0].lc.data == { "type": [12, 3, 12, 9], "default": [13, 3, 13, 12], - "id": [11, 2, 
11, 6], + "id": [14, 3, 14, 7], } assert saved_obj["inputs"][0]["type"] == "File" - assert saved_obj["inputs"][1].lc.data == { - "id": [14, 2, 14, 6], - "type": [15, 2, 15, 8], - } + assert saved_obj["inputs"][1].lc.data == {"id": [15, 2, 15, 6], "type": [16, 2, 16, 8]} assert saved_obj["outputs"][0].lc.data == { "type": [6, 4, 6, 10], "outputBinding": [7, 4, 8, 6], - "id": [5, 2, 5, 6], + "id": [9, 4, 9, 8], } @@ -95,27 +91,27 @@ def test_type_dsl() -> None: obj = load_document_by_uri(str(path)) saved_obj = obj.save() assert isinstance(saved_obj, CommentedMap) - assert saved_obj.lc.data == { + assert { "cwlVersion": [1, 0, 1, 12], "baseCommand": [2, 0, 2, 13], "inputs": [4, 0, 5, 2], - "outputs": [10, 0, 11, 2], - "stdout": [16, 0, 16, 8], - "id": [17, 0, 17, 4], + "outputs": [11, 0, 12, 2], + "stdout": [17, 0, 17, 8], + "id": [18, 0, 18, 4], } assert saved_obj["inputs"][0].lc.data == { "type": [6, 3, 6, 9], "default": [7, 3, 7, 12], - "id": [5, 2, 5, 6], + "id": [8, 3, 8, 7], } assert saved_obj["inputs"][0]["type"] == ["null", "File"] - assert saved_obj["inputs"][1].lc.data == {"id": [8, 2, 8, 6], "type": [9, 2, 9, 8]} + assert saved_obj["inputs"][1].lc.data == {"id": [9, 2, 9, 6], "type": [10, 2, 10, 8]} assert saved_obj["outputs"][0].lc.data == { - "type": [12, 4, 12, 10], - "outputBinding": [13, 4, 14, 6], - "id": [11, 2, 11, 6], + "type": [13, 4, 13, 10], + "outputBinding": [14, 4, 15, 6], + "id": [16, 4, 16, 8], } - assert saved_obj["outputs"][0]["outputBinding"].lc.data == {"glob": [14, 6, 14, 12]} + assert saved_obj["outputs"][0]["outputBinding"].lc.data == {"glob": [15, 6, 15, 12]} def load_document_by_uri(path: str) -> Any: diff --git a/schema_salad/tests/test_secondary_files_dsl.cwl b/schema_salad/tests/test_secondary_files_dsl.cwl index 1f6c712a4..9b08bc9e9 100644 --- a/schema_salad/tests/test_secondary_files_dsl.cwl +++ b/schema_salad/tests/test_secondary_files_dsl.cwl @@ -5,6 +5,7 @@ baseCommand: python3 inputs: files: type: File + secondaryFiles: 
["inputB.txt", "inputC.txt?"] default: "script.py" other_file: File From cc76eb91c756f3cde12802aafc62849d66ce5fe7 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 5 Jun 2023 16:49:24 -0600 Subject: [PATCH 37/44] Updating metaschema.py and updating to pass lint --- cwl_v1_2.py | 26301 ++++++++++++++++++++++ schema_salad/metaschema.py | 318 +- schema_salad/python_codegen.py | 14 +- schema_salad/tests/cwl_v1_0.py | 1496 +- schema_salad/tests/cwl_v1_1.py | 1753 +- schema_salad/tests/cwl_v1_2.py | 427 +- schema_salad/tests/test_line_numbers.py | 11 +- 7 files changed, 28781 insertions(+), 1539 deletions(-) create mode 100644 cwl_v1_2.py diff --git a/cwl_v1_2.py b/cwl_v1_2.py new file mode 100644 index 000000000..3ebc174e9 --- /dev/null +++ b/cwl_v1_2.py @@ -0,0 +1,26301 @@ +# +# This file was autogenerated using schema-salad-tool --codegen=python +# The code itself is released under the Apache 2.0 license and the help text is +# subject to the license of the original schema. +import copy +import logging +import os +import pathlib +import re +import tempfile +import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 +import xml.sax # nosec +from abc import ABC, abstractmethod +from io import StringIO +from typing import ( + Any, + Dict, + List, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, + no_type_check, +) +from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit +from urllib.request import pathname2url + +from rdflib import Graph +from rdflib.plugins.parsers.notation3 import BadSyntax +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +from schema_salad.exceptions import SchemaSaladException, ValidationException +from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.sourceline import SourceLine, add_lc_filename +from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ + +_vocab: Dict[str, str] = {} 
+_rvocab: Dict[str, str] = {} + +_logger = logging.getLogger("salad") + + +IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] + +doc_line_info = CommentedMap() + + +class LoadingOptions: + idx: IdxType + fileuri: Optional[str] + baseuri: str + namespaces: MutableMapping[str, str] + schemas: MutableSequence[str] + original_doc: Optional[Any] + addl_metadata: MutableMapping[str, Any] + fetcher: Fetcher + vocab: Dict[str, str] + rvocab: Dict[str, str] + cache: CacheType + imports: List[str] + includes: List[str] + + def __init__( + self, + fetcher: Optional[Fetcher] = None, + namespaces: Optional[Dict[str, str]] = None, + schemas: Optional[List[str]] = None, + fileuri: Optional[str] = None, + copyfrom: Optional["LoadingOptions"] = None, + original_doc: Optional[Any] = None, + addl_metadata: Optional[Dict[str, str]] = None, + baseuri: Optional[str] = None, + idx: Optional[IdxType] = None, + imports: Optional[List[str]] = None, + includes: Optional[List[str]] = None, + ) -> None: + """Create a LoadingOptions object.""" + self.original_doc = original_doc + + if idx is not None: + self.idx = idx + else: + self.idx = copyfrom.idx if copyfrom is not None else {} + + if fileuri is not None: + self.fileuri = fileuri + else: + self.fileuri = copyfrom.fileuri if copyfrom is not None else None + + if baseuri is not None: + self.baseuri = baseuri + else: + self.baseuri = copyfrom.baseuri if copyfrom is not None else "" + + if namespaces is not None: + self.namespaces = namespaces + else: + self.namespaces = copyfrom.namespaces if copyfrom is not None else {} + + if schemas is not None: + self.schemas = schemas + else: + self.schemas = copyfrom.schemas if copyfrom is not None else [] + + if addl_metadata is not None: + self.addl_metadata = addl_metadata + else: + self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} + + if imports is not None: + self.imports = imports + else: + self.imports = copyfrom.imports if copyfrom is not None else [] + + if 
includes is not None: + self.includes = includes + else: + self.includes = copyfrom.includes if copyfrom is not None else [] + + if fetcher is not None: + self.fetcher = fetcher + elif copyfrom is not None: + self.fetcher = copyfrom.fetcher + else: + import requests + from cachecontrol.caches import FileCache + from cachecontrol.wrapper import CacheControl + + root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) + session = CacheControl( + requests.Session(), + cache=FileCache(root / ".cache" / "salad"), + ) + self.fetcher: Fetcher = DefaultFetcher({}, session) + + self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} + + self.vocab = _vocab + self.rvocab = _rvocab + + if namespaces is not None: + self.vocab = self.vocab.copy() + self.rvocab = self.rvocab.copy() + for k, v in namespaces.items(): + self.vocab[k] = v + self.rvocab[v] = k + + @property + def graph(self) -> Graph: + """Generate a merged rdflib.Graph from all entries in self.schemas.""" + graph = Graph() + if not self.schemas: + return graph + key = str(hash(tuple(self.schemas))) + if key in self.cache: + return cast(Graph, self.cache[key]) + for schema in self.schemas: + fetchurl = ( + self.fetcher.urljoin(self.fileuri, schema) + if self.fileuri is not None + else pathlib.Path(schema).resolve().as_uri() + ) + if fetchurl not in self.cache or self.cache[fetchurl] is True: + _logger.debug("Getting external schema %s", fetchurl) + try: + content = self.fetcher.fetch_text(fetchurl) + except Exception as e: + _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) + continue + newGraph = Graph() + err_msg = "unknown error" + for fmt in ["xml", "turtle"]: + try: + newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) + self.cache[fetchurl] = newGraph + graph += newGraph + break + except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: + err_msg = str(e) + else: + _logger.warning("Could not load extension schema %s: %s", 
fetchurl, err_msg) + self.cache[key] = graph + return graph + + +class Saveable(ABC): + """Mark classes than have a save() and fromDoc() function.""" + + @classmethod + @abstractmethod + def fromDoc( + cls, + _doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Saveable": + """Construct this object from the result of yaml.load().""" + + @abstractmethod + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, + ) -> CommentedMap: + """Convert this object to a JSON/YAML friendly dictionary.""" + + +def load_field(val, fieldtype, baseuri, loadingOptions): + # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) + result, metadata = _document_load_by_url( + fieldtype, + url, + loadingOptions, + ) + loadingOptions.imports.append(url) + return result + if "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) + val = loadingOptions.fetcher.fetch_text(url) + loadingOptions.includes.append(url) + return fieldtype.load(val, baseuri, loadingOptions) + + +save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] + + +def add_kv( + old_doc: CommentedMap, + new_doc: CommentedMap, + line_numbers: Dict[Any, Dict[str, int]], + key: str, + val: Any, + max_len: int, + cols: Dict[int, int], + min_col: int = 0, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> Tuple[int, Optional[Dict[int, int]]]: + """Add key value pair 
into Commented Map. + + Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers + for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, + and max col value taken for each line. + """ + if inserted_line_info is None: + inserted_line_info = {} + + if len(inserted_line_info.keys()) >= 1: + max_line = max(inserted_line_info.keys()) + 1 + else: + max_line = 0 + + if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] # Get the line information for the key + if ( + line_info[0] + shift not in inserted_line_info + ): # If the line of the key + shift isn't taken, add it + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[key][0] + shift, + old_doc.lc.data[key][1], + old_doc.lc.data[key][2] + shift, + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] + else: # If the line is already taken + line = line_info[0] + shift + while line in inserted_line_info.keys(): # Find the closest free line + line += 1 + new_doc.lc.add_kv_line_col( + key, + [ + line, + old_doc.lc.data[key][1], + line + (line - old_doc.lc.data[key][2]), + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[line] = old_doc.lc.data[key][1] + return max_len, inserted_line_info + elif isinstance(val, (int, float, str)) and not isinstance( + val, bool + ): # If the value is hashable + if val in line_numbers: # If the value is in the original CommentedMap + line = line_numbers[val]["line"] + shift # Get the line info for the value + if line in inserted_line_info: # Get the appropriate line to place value on + line = max_line + + col = line_numbers[val]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + return max_len, inserted_line_info + elif isinstance(val, str): # Logic for DSL expansition with "?" 
+ if val + "?" in line_numbers: + line = line_numbers[val + "?"]["line"] + shift + if line in inserted_line_info: + line = max_line + col = line_numbers[val + "?"]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + return max_len, inserted_line_info + elif old_doc: + if val in old_doc: + index = old_doc.lc.data.index(val) + line_info = old_doc.lc.data[index] + if line_info[0] + shift not in inserted_line_info: + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[index][0] + shift, + old_doc.lc.data[index][1], + old_doc.lc.data[index][2] + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ + 1 + ] + else: + new_doc.lc.add_kv_line_col( + key, + [ + max_line + shift, + old_doc.lc.data[index][1], + max_line + (max_line - old_doc.lc.data[index][2]) + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] + # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) + inserted_line_info[max_line] = min_col + len(key) + 2 + return max_len + 1, inserted_line_info + + +@no_type_check +def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + return None + else: + return None + if isinstance(doc, CommentedSeq): + to_return = CommentedMap() + for index, key in enumerate(doc): + to_return[key] = "" + to_return.lc.add_kv_line_col( + key, + [ + doc.lc.data[index][0], + doc.lc.data[index][1], + doc.lc.data[index][0], + doc.lc.data[index][1], + ], + ) + return to_return + return doc + + +def get_line_numbers(doc: 
Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: + """Get line numbers for kv pairs in CommentedMap. + + For each key/value pair in a CommentedMap, save the line/col info into a dictionary, + only save value info if value is hashable. + """ + line_numbers: Dict[Any, Dict[str, int]] = {} + if doc is None: + return {} + if doc.lc.data is None: + return {} + for key, value in doc.lc.data.items(): + line_numbers[key] = {} + + line_numbers[key]["line"] = doc.lc.data[key][0] + line_numbers[key]["col"] = doc.lc.data[key][1] + if isinstance(value, (int, float, bool, str)): + line_numbers[value] = {} + line_numbers[value]["line"] = doc.lc.data[key][2] + line_numbers[value]["col"] = doc.lc.data[key][3] + return line_numbers + + +def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + min_col = 0 + for line in line_numbers: + if line_numbers[line]["col"] > min_col: + min_col = line_numbers[line]["col"] + return min_col + + +def get_max_line_num(doc: CommentedMap) -> int: + """Get the max line number for a CommentedMap. + + Iterate through the the key with the highest line number until you reach a non-CommentedMap value + or empty CommentedMap. + """ + max_line = 0 + max_key = "" + cur = doc + while isinstance(cur, CommentedMap) and len(cur) > 0: + for key in cur.lc.data.keys(): + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line + 1 + + +def save( + val: Any, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> save_type: + """Save a val of any type. + + Recursively calls save method from class if val is of type Saveable. + Otherwise, saves val to CommentedMap or CommentedSeq. 
+ """ + if keys is None: + keys = [] + + doc = iterate_through_doc(keys) + + if isinstance(val, Saveable): + return val.save( + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if isinstance(val, MutableSequence): + r = CommentedSeq() + r.lc.data = {} + for i in range(0, len(val)): + new_keys = keys + if doc: + if str(i) in doc: + r.lc.data[i] = doc.lc.data[i] + new_keys.append(i) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + ) + return r + + if isinstance(val, MutableMapping): + newdict = CommentedMap() + new_keys = keys + for key in val: + + if doc: + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + new_keys.append(key) + + newdict[key] = save( + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + return newdict + if val is None or isinstance(val, (int, float, bool, str)): + return val + raise Exception("Not Saveable: %s" % type(val)) + + +def save_with_metadata( + val: Any, + valLoadingOpts: LoadingOptions, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, +) -> save_type: + """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" + saved_val = save(val, top, base_url, relative_uris) + newdict: MutableMapping[str, Any] = {} + if isinstance(saved_val, MutableSequence): + newdict = {"$graph": saved_val} + elif isinstance(saved_val, MutableMapping): + newdict = saved_val + + if valLoadingOpts.namespaces: + newdict["$namespaces"] = valLoadingOpts.namespaces + if valLoadingOpts.schemas: + newdict["$schemas"] = valLoadingOpts.schemas + if valLoadingOpts.baseuri: + newdict["$base"] = valLoadingOpts.baseuri + for k, v in valLoadingOpts.addl_metadata.items(): + if k not in 
newdict: + newdict[k] = v + + return newdict + + +def expand_url( + url, # type: str + base_url, # type: str + loadingOptions, # type: LoadingOptions + scoped_id=False, # type: bool + vocab_term=False, # type: bool + scoped_ref=None, # type: Optional[int] +): + # type: (...) -> str + if url in ("@id", "@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and ":" in url: + prefix = url.split(":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urlsplit(url) + + if ( + (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urlsplit(base_url) + frg = "" + if bool(splitbase.fragment): + frg = splitbase.fragment + "/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urlsplit(base_url) + sp = splitbase.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException(f"Term {url!r} not in vocabulary") + + return url + + +class _Loader: + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + pass + + +class _AnyLoader(_Loader): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, 
Optional[str]) -> Any + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp): + # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None + self.tp = tp + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, self.tp): + raise ValidationException( + "Expected a {} but got {}".format( + self.tp.__class__.__name__, doc.__class__.__name__ + ) + ) + return doc + + def __repr__(self): # type: () -> str + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items): + # type: (_Loader) -> None + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableSequence): + raise ValidationException(f"Expected a list, was {type(doc)}") + r = [] # type: List[Any] + errors = [] # type: List[SchemaSaladException] + for i in range(0, len(doc)): + try: + lf = load_field(doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions) + if isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + except ValidationException as e: + errors.append(e.with_sourceline(SourceLine(doc, i, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self): # type: () -> str + return f"array<{self.items}>" + + +class _EnumLoader(_Loader): + def __init__(self, symbols: Sequence[str], name: str) -> None: + self.symbols = symbols + self.name = name + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if doc in self.symbols: + return doc + raise ValidationException(f"Expected one of {self.symbols}") + + def __repr__(self): # type: () -> str + return self.name + + +class _SecondaryDSLLoader(_Loader): + def __init__(self, inner): + # type: (_Loader) -> None + 
self.inner = inner + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + r: List[Dict[str, Any]] = [] + if isinstance(doc, MutableSequence): + for d in doc: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict: Dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {d}" + ) + new_dict["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None + ) + + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) + ) + r.append(new_dict) + + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." + ) + elif isinstance(doc, MutableMapping): + new_dict = {} + doc_copy = copy.deepcopy(doc) + if "pattern" in doc_copy: + new_dict["pattern"] = doc_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {doc}" + ) + new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None + + if len(doc_copy): + raise ValidationException( + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" + ) + r.append(new_dict) + + elif isinstance(doc, str): + if doc.endswith("?"): + r.append({"pattern": doc[:-1], "required": False}) + else: + r.append({"pattern": doc}) + else: + raise ValidationException("Expected str or sequence of str") + return self.inner.load(r, baseuri, loadingOptions, docRoot) + + +class _RecordLoader(_Loader): + def __init__(self, classtype): + # type: (Type[Saveable]) -> None + self.classtype = classtype + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, 
LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableMapping): + raise ValidationException(f"Expected a dict, was {type(doc)}") + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self): # type: () -> str + return str(self.classtype.__name__) + + +class _ExpressionLoader(_Loader): + def __init__(self, items: Type[str]) -> None: + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, str): + raise ValidationException(f"Expected a str, was {type(doc)}") + return doc + + +class _UnionLoader(_Loader): + def __init__(self, alternates: Sequence[_Loader]) -> None: + self.alternates = alternates + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + errors = [] + for t in self.alternates: + try: + return t.load(doc, baseuri, loadingOptions, docRoot=docRoot) + except ValidationException as e: + errors.append(ValidationException(f"tried {t} but", None, [e])) + raise ValidationException("", None, errors, "-") + + def __repr__(self): # type: () -> str + return " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__(self, inner, scoped_id, vocab_term, scoped_ref): + # type: (_Loader, bool, bool, Union[int, None]) -> None + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = scoped_ref + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + newdoc = [] + for i in doc: + if isinstance(i, str): + newdoc.append( + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + elif isinstance(doc, str): + doc = expand_url( + doc, + baseuri, + loadingOptions, + 
self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + return self.inner.load(doc, baseuri, loadingOptions) + + +class _TypeDSLLoader(_Loader): + typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") + + def __init__(self, inner, refScope): + # type: (_Loader, Union[int, None]) -> None + self.inner = inner + self.refScope = refScope + + def resolve( + self, + doc, # type: str + baseuri, # type: str + loadingOptions, # type: LoadingOptions + ): + # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] + m = self.typeDSLregex.match(doc) + if m: + group1 = m.group(1) + assert group1 is not None # nosec + first = expand_url(group1, baseuri, loadingOptions, False, True, self.refScope) + second = third = None + if bool(m.group(2)): + second = {"type": "array", "items": first} + # second = CommentedMap((("type", "array"), + # ("items", first))) + # second.lc.add_kv_line_col("type", lc) + # second.lc.add_kv_line_col("items", lc) + # second.lc.filename = filename + if bool(m.group(3)): + third = ["null", second or first] + # third = CommentedSeq(["null", second or first]) + # third.lc.add_kv_line_col(0, lc) + # third.lc.add_kv_line_col(1, lc) + # third.lc.filename = filename + return third or second or first + return doc + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + r = [] # type: List[Any] + for d in doc: + if isinstance(d, str): + resolved = self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, str): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions) + + +class _IdMapLoader(_Loader): + def __init__(self, inner, mapSubject, mapPredicate): + # type: (_Loader, str, Union[str, None]) -> None 
+ self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableMapping): + r = [] # type: List[Any] + for k in sorted(doc.keys()): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions) + + +def _document_load( + loader: _Loader, + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], + baseuri: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Tuple[Any, LoadingOptions]: + if isinstance(doc, str): + return _document_load_by_url( + loader, + loadingOptions.fetcher.urljoin(baseuri, doc), + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + if isinstance(doc, MutableMapping): + addl_metadata = {} + if addl_metadata_fields is not None: + for mf in addl_metadata_fields: + if mf in doc: + addl_metadata[mf] = doc[mf] + + docuri = baseuri + if "$base" in doc: + baseuri = doc["$base"] + + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, + namespaces=doc.get("$namespaces", None), + schemas=doc.get("$schemas", None), + baseuri=doc.get("$base", None), + addl_metadata=addl_metadata, + ) + + doc = copy.copy(doc) + if "$namespaces" in doc: + doc.pop("$namespaces") + if "$schemas" in doc: + doc.pop("$schemas") + if "$base" in doc: + doc.pop("$base") + + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc + + if "$graph" in doc: + 
loadingOptions.idx[baseuri] = ( + loader.load(doc["$graph"], baseuri, loadingOptions), + loadingOptions, + ) + else: + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), + loadingOptions, + ) + + if docuri != baseuri: + loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] + + return loadingOptions.idx[baseuri] + if isinstance(doc, MutableSequence): + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions), + loadingOptions, + ) + return loadingOptions.idx[baseuri] + + raise ValidationException( + "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) + ) + + +def _document_load_by_url( + loader: _Loader, + url: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Tuple[Any, LoadingOptions]: + if url in loadingOptions.idx: + return loadingOptions.idx[url] + + doc_url, frg = urldefrag(url) + + text = loadingOptions.fetcher.fetch_text(doc_url) + textIO = StringIO(text) + textIO.name = str(doc_url) + yaml = yaml_no_ts() + result = yaml.load(textIO) + add_lc_filename(result, doc_url) + + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) + + _document_load( + loader, + result, + doc_url, + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + return loadingOptions.idx[url] + + +def file_uri(path, split_frag=False): # type: (str, bool) -> str + if path.startswith("file://"): + return path + if split_frag: + pathsp = path.split("#", 2) + frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" + urlpath = pathname2url(str(pathsp[0])) + else: + urlpath = pathname2url(path) + frag = "" + if urlpath.startswith("//"): + return f"file:{urlpath}{frag}" + return f"file://{urlpath}{frag}" + + +def prefix_url(url: str, namespaces: Dict[str, str]) -> str: + """Expand short forms into full URLs using the given namespace dictionary.""" + for k, v in namespaces.items(): + if url.startswith(v): + 
# --- NOTE(review): the two lines below are the tail of prefix_url(), whose
# def line sits in the previous chunk of this patch. ---
            return k + ":" + url[len(v) :]
    return url


def save_relative_uri(
    uri: Any,
    base_url: str,
    scoped_id: bool,
    ref_scope: Optional[int],
    relative_uris: bool,
) -> Any:
    """Convert any URI to a relative one, obeying the scoping rules."""
    if isinstance(uri, MutableSequence):
        # Recurse element-wise over lists of URIs.
        return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri]
    elif isinstance(uri, str):
        if not relative_uris or uri == base_url:
            return uri
        urisplit = urlsplit(uri)
        basesplit = urlsplit(base_url)
        # Only relativize within the same scheme+host; otherwise return as-is.
        if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc:
            if urisplit.path != basesplit.path:
                # Different documents: express as a relative path, keeping
                # the fragment if present.
                p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path))
                if urisplit.fragment:
                    p = p + "#" + urisplit.fragment
                return p

            # Same document: compare fragments.  The trailing "/" makes the
            # startswith() prefix test below match whole path components.
            basefrag = basesplit.fragment + "/"
            if ref_scope:
                # Strip `ref_scope` trailing components from the base
                # fragment before the prefix comparison (scoped identifiers).
                sp = basefrag.split("/")
                i = 0
                while i < ref_scope:
                    sp.pop()
                    i += 1
                basefrag = "/".join(sp)

            if urisplit.fragment.startswith(basefrag):
                return urisplit.fragment[len(basefrag) :]
            return urisplit.fragment
        return uri
    else:
        # Non-string (already-parsed object): serialize it instead.
        return save(uri, top=False, base_url=base_url, relative_uris=relative_uris)


def shortname(inputid: str) -> str:
    """
    Compute the shortname of a fully qualified identifier.

    See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names.
    """
    # Prefer the last component of the fragment; fall back to the path.
    parsed_id = urlparse(inputid)
    if parsed_id.fragment:
        return parsed_id.fragment.split("/")[-1]
    return parsed_id.path.split("/")[-1]


def parser_info() -> str:
    """Return the schema identifier this generated parser was built from."""
    return "org.w3id.cwl.v1_2"


class Documented(Saveable):
    # Marker base class for record types that may carry a `doc` field
    # (see RecordField below).
    pass


class RecordField(Documented):
    """
    A field of a record.
    """

    def __init__(
        self,
        name: Any,
        type: Any,
        doc: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        # extension_fields holds any non-schema ("namespaced") keys; default
        # to an empty CommentedMap so line/column bookkeeping stays uniform.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.name = name
        self.type = type

    def __eq__(self, other: Any) -> bool:
        # Equality compares only schema fields, not extension_fields or
        # loadingOptions (consistent with __hash__ below).
        if isinstance(other, RecordField):
            return bool(
                self.doc == other.doc
                and self.name == other.name
                and self.type == other.type
            )
        return False

    def __hash__(self) -> int:
        return hash((self.doc, self.name, self.type))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "RecordField":
        """Construct a RecordField from a parsed document node, validating each field."""
        # Shallow-copy the node but keep ruamel's line/column metadata so
        # validation errors can point at the original source location.
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        # Field errors are accumulated and raised together at the end.
        _errors__ = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'name' field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None

        __original_name_is_none = name is None
        if name is None:
            # Fall back to the document root identifier; a RecordField with
            # no resolvable name is an error.
            if docRoot is not None:
                name = docRoot
            else:
                raise ValidationException("Missing name")
        if not __original_name_is_none:
            # Subsequent fields resolve relative to this record's own id.
            baseuri = name
        if "doc" in _doc:
            try:
                # NOTE: rebinds the `doc` parameter to the loaded 'doc' field.
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'doc' field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        try:
            # NOTE(review): `type` shadows the builtin here (generated code).
            type = load_field(
                _doc.get("type"),
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if 
isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", 
+ val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["doc", "name", "type"]) + + +class RecordSchema(Saveable): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordSchema): + return bool(self.fields == other.fields and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' 
field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If 
the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type"]) + + +class EnumSchema(Saveable): + """ + Define an enumerated type. 
+ + """ + + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + 
typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnumSchema'", None, _errors__) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in 
inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + 
r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type"]) + + +class ArraySchema(Saveable): + def __init__( + self, + items: Any, + type: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ArraySchema): + return bool(self.items == other.items and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + 
for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) 
== 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type"]) + + +class File(Saveable): + """ + Represents a file (or group of files when `secondaryFiles` is provided) that + will be accessible by tools using standard POSIX file system call API such as + open(2) and read(2). + + Files are represented as objects with `class` of `File`. File objects have + a number of properties that provide metadata about the file. + + The `location` property of a File is a IRI that uniquely identifies the + file. Implementations must support the `file://` IRI scheme and may support + other schemes such as `http://` and `https://`. 
The value of `location` may also be a + relative reference, in which case it must be resolved relative to the IRI + of the document it appears in. Alternately to `location`, implementations + must also accept the `path` property on File, which must be a filesystem + path available on the same host as the CWL runner (for inputs) or the + runtime environment of a command line tool execution (for command line tool + outputs). + + If no `location` or `path` is specified, a file object must specify + `contents` with the UTF-8 text content of the file. This is a "file + literal". File literals do not correspond to external resources, but are + created on disk with `contents` with when needed for executing a tool. + Where appropriate, expressions can return file literals to define new files + on a runtime. The maximum size of `contents` is 64 kilobytes. + + The `basename` property defines the filename on disk where the file is + staged. This may differ from the resource name. If not provided, + `basename` must be computed from the last path part of `location` and made + available to expressions. + + The `secondaryFiles` property is a list of File or Directory objects that + must be staged in the same directory as the primary file. It is an error + for file names to be duplicated in `secondaryFiles`. + + The `size` property is the size in bytes of the File. It must be computed + from the resource and made available to expressions. The `checksum` field + contains a cryptographic hash of the file content for use it verifying file + contents. Implementations may, at user option, enable or disable + computation of the `checksum` field for performance or other reasons. + However, the ability to compute output checksums is required to pass the + CWL conformance test suite. + + When executing a CommandLineTool, the files and secondary files may be + staged to an arbitrary directory, but must use the value of `basename` for + the filename. 
The `path` property must be file path in the context of the + tool execution runtime (local to the compute node, or within the executing + container). All computed properties should be available to expressions. + File literals also must be staged and `path` must be set. + + When collecting CommandLineTool outputs, `glob` matching returns file paths + (with the `path` property) and the derived properties. This can all be + modified by `outputEval`. Alternately, if the file `cwl.output.json` is + present in the output, `outputBinding` is ignored. + + File objects in the output must provide either a `location` IRI or a `path` + property in the context of the tool execution runtime (local to the compute + node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via + `location` (the expression tool does not have access to files on disk so + `path` is meaningless) or as file literals. It is legal to return a file + object with an existing `location` but a different `basename`. The + `loadContents` field of ExpressionTool inputs behaves the same as on + CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. 
+ + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + dirname: Optional[Any] = None, + nameroot: Optional[Any] = None, + nameext: Optional[Any] = None, + checksum: Optional[Any] = None, + size: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + format: Optional[Any] = None, + contents: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, File): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum == other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, + self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "File": + 
_doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "File": + raise ValidationException("Not a File") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'location' field is not valid because:", + SourceLine(_doc, "location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'path' field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'basename' field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "dirname" in _doc: + try: + dirname = load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dirname' field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + ) + ) + else: + dirname = None + if "nameroot" in _doc: + try: + nameroot = load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'nameroot' field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], + ) + ) + else: + nameroot = None + if "nameext" in 
_doc: + try: + nameext = load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'nameext' field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + ) + ) + else: + nameext = None + if "checksum" in _doc: + try: + checksum = load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'checksum' field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + ) + ) + else: + checksum = None + if "size" in _doc: + try: + size = load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'size' field is not valid because:", + SourceLine(_doc, "size", str), + [e], + ) + ) + else: + size = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "contents" in _doc: + try: + contents = load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the 'contents' field is not valid because:", + SourceLine(_doc, "contents", str), + [e], + ) + ) + else: + contents = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'File'", None, _errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "File" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, 
str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dirname is not None and "dirname" not in r: + r["dirname"] = save( + self.dirname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dirname", + val=r.get("dirname"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.nameroot is not None and "nameroot" not in r: + r["nameroot"] = save( + self.nameroot, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameroot", + val=r.get("nameroot"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.nameext is not None and "nameext" not in r: + r["nameext"] = save( + self.nameext, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameext", + val=r.get("nameext"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.checksum is not None and "checksum" not in r: + r["checksum"] = save( + self.checksum, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="checksum", + val=r.get("checksum"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.size is not 
None and "size" not in r: + r["size"] = save( + self.size, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="size", + val=r.get("size"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.contents is not None and "contents" not in r: + r["contents"] = save( + self.contents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="contents", + val=r.get("contents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = 
self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with `class` of `Directory`. Directory objects have + a number of properties that provide metadata about the directory. + + The `location` property of a Directory is a IRI that uniquely identifies + the directory. Implementations must support the file:// IRI scheme and may + support other schemes such as http://. Alternately to `location`, + implementations must also accept the `path` property on Directory, which + must be a filesystem path available on the same host as the CWL runner (for + inputs) or the runtime environment of a command line tool execution (for + command line tool outputs). + + A Directory object may have a `listing` field. This is a list of File and + Directory objects that are contained in the Directory. For each entry in + `listing`, the `basename` property defines the name of the File or + Subdirectory when staged to disk. If `listing` is not provided, the + implementation must have some way of fetching the Directory listing at + runtime based on the `location` field. + + If a Directory does not have `location`, it is a Directory literal. A + Directory literal must provide `listing`. Directory literals must be + created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied + relationship in their `location`. For example, a Directory listing may + contain two files located on different hosts. It is the responsibility of + the runtime to ensure that those files are staged to disk appropriately. + Secondary files associated with files in `listing` must also be staged to + the same Directory. 
+ + When executing a CommandLineTool, Directories must be recursively staged + first and have local values of `path` assigned. + + Directory objects in CommandLineTool output must provide either a + `location` IRI or a `path` property in the context of the tool execution + runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + Name conflicts (the same `basename` appearing multiple times in `listing` + or in any entry in `secondaryFiles` in the listing) is a fatal error. + + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + listing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "Directory" + self.location = location + self.path = path + self.basename = basename + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Directory): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.listing == other.listing + ) + return False + + def __hash__(self) -> int: + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Directory": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Directory": + raise 
ValidationException("Not a Directory") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'location' field is not valid because:", + SourceLine(_doc, "location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'path' field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'basename' field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "listing" in _doc: + try: + listing = load_field( + _doc.get("listing"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'listing' field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + else: + listing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Directory'", None, 
_errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Directory" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.location is not None and "location" not 
in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.listing is not None and "listing" not in r: + r["listing"] = save( + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "location", "path", "basename", "listing"]) + + +class 
Labeled(Saveable): + pass + + +class Identified(Saveable): + pass + + +class LoadContents(Saveable): + pass + + +class FieldBase(Labeled): + pass + + +class InputFormat(Saveable): + pass + + +class OutputFormat(Saveable): + pass + + +class Parameter(FieldBase, Documented, Identified): + """ + Define an input or output parameter to a process. + + """ + + pass + + +class InputBinding(Saveable): + def __init__( + self, + loadContents: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputBinding): + return bool(self.loadContents == other.loadContents) + return False + + def __hash__(self) -> int: + return hash((self.loadContents)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid 
field `{}`, expected one of: `loadContents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, 
+ inserted_line_info=inserted_line_info, + shift=shift + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["loadContents"]) + + +class IOSchema(Labeled, Documented): + pass + + +class InputSchema(IOSchema): + pass + + +class OutputSchema(IOSchema): + pass + + +class InputRecordField(RecordField, FieldBase, InputFormat, LoadContents): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordField): + 
return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + 
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format 
= None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputRecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + 
doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) 
+ max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + ] + ) + + +class 
class InputRecordSchema(RecordSchema, InputSchema):
    """CWL ``InputRecordSchema`` binding.

    Built from a parsed YAML mapping via :meth:`fromDoc` and serialized back
    to a ``ruamel.yaml`` ``CommentedMap`` via :meth:`save`, which reuses the
    original document's line/column (``lc``) data so round-tripped output
    keeps the input's layout where possible.

    NOTE(review): this follows the repetitive schema-salad python-codegen
    pattern and is presumably machine-generated — prefer fixing the code
    generator over hand-editing this class; TODO confirm.
    """

    def __init__(
        self,
        type: Any,
        fields: Optional[Any] = None,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        name: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Default to an empty CommentedMap / fresh LoadingOptions so the
        # instance is always usable even when constructed directly.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.fields = fields
        self.type = type
        self.label = label
        self.doc = doc
        self.name = name

    def __eq__(self, other: Any) -> bool:
        """Value equality over the five schema fields (same-type only)."""
        if isinstance(other, InputRecordSchema):
            return bool(
                self.fields == other.fields
                and self.type == other.type
                and self.label == other.label
                and self.doc == other.doc
                and self.name == other.name
            )
        return False

    def __hash__(self) -> int:
        # Keep __hash__ consistent with __eq__: hash the same five fields.
        return hash((self.fields, self.type, self.label, self.doc, self.name))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InputRecordSchema":
        """Construct an instance from a parsed document node.

        Field loads that fail are accumulated in ``_errors__`` and raised as
        a single nested ValidationException at the end, so the caller sees
        every invalid field at once rather than just the first.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Re-attach the source line/column data to the shallow copy so
            # SourceLine-based error messages point at the original YAML.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'name' field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None

        __original_name_is_none = name is None
        if name is None:
            # Missing name: fall back to the document root id, else mint a
            # blank-node id.  Only a user-supplied name rebases ``baseuri``.
            if docRoot is not None:
                name = docRoot
            else:
                name = "_:" + str(_uuid__.uuid4())
        if not __original_name_is_none:
            baseuri = name
        if "fields" in _doc:
            try:
                fields = load_field(
                    _doc.get("fields"),
                    idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'fields' field is not valid because:",
                        SourceLine(_doc, "fields", str),
                        [e],
                    )
                )
        else:
            fields = None
        try:
            # 'type' is mandatory: loaded unconditionally (no "in _doc" guard).
            type = load_field(
                _doc.get("type"),
                typedsl_Record_nameLoader_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'type' field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'label' field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'doc' field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                # Unknown keys containing ':' are kept as extension fields
                # (expanded to absolute URLs); any other unknown key is an
                # error and stops the scan.
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'InputRecordSchema'", None, _errors__)
        _constructed = cls(
            fields=fields,
            type=type,
            label=label,
            doc=doc,
            name=name,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object so later references by name resolve.
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize to a CommentedMap, preserving original line/column info.

        ``keys`` is the path from the document root to this node (used to
        look up the original node), ``inserted_line_info`` maps lines already
        occupied by emitted entries, and ``shift`` offsets lines to make room
        for insertions.
        """
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc is not None:
            # Carry over the original mapping's position.
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        if doc:
            # Walk keys in original document order so the output mirrors the
            # input's ordering and line placement.
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    # Slide past lines already occupied by
                                    # previously inserted entries.
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Fields below were not present in the original document (or not yet
        # emitted above); append them with synthesized positions.
        if self.name is not None and "name" not in r:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="name",
                val=r.get("name"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.fields is not None and "fields" not in r:
            r["fields"] = save(
                self.fields,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="fields",
                val=r.get("fields"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="type",
                val=r.get("type"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="label",
                val=r.get("label"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="doc",
                val=r.get("doc"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized (non-extension) keys for this schema type.
    attrs = frozenset(["fields", "type", "label", "doc", "name"])
class InputEnumSchema(EnumSchema, InputSchema):
    """CWL ``InputEnumSchema`` binding.

    Built from a parsed YAML mapping via :meth:`fromDoc` and serialized back
    to a ``ruamel.yaml`` ``CommentedMap`` via :meth:`save`, which reuses the
    original document's line/column (``lc``) data so round-tripped output
    keeps the input's layout where possible.

    NOTE(review): this follows the repetitive schema-salad python-codegen
    pattern and is presumably machine-generated — prefer fixing the code
    generator over hand-editing this class; TODO confirm.
    """

    def __init__(
        self,
        symbols: Any,
        type: Any,
        name: Optional[Any] = None,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Default to an empty CommentedMap / fresh LoadingOptions so the
        # instance is always usable even when constructed directly.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.name = name
        self.symbols = symbols
        self.type = type
        self.label = label
        self.doc = doc

    def __eq__(self, other: Any) -> bool:
        """Value equality over the five schema fields (same-type only)."""
        if isinstance(other, InputEnumSchema):
            return bool(
                self.name == other.name
                and self.symbols == other.symbols
                and self.type == other.type
                and self.label == other.label
                and self.doc == other.doc
            )
        return False

    def __hash__(self) -> int:
        # Keep __hash__ consistent with __eq__: hash the same five fields.
        return hash((self.name, self.symbols, self.type, self.label, self.doc))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InputEnumSchema":
        """Construct an instance from a parsed document node.

        Field loads that fail are accumulated in ``_errors__`` and raised as
        a single nested ValidationException at the end, so the caller sees
        every invalid field at once rather than just the first.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Re-attach the source line/column data to the shallow copy so
            # SourceLine-based error messages point at the original YAML.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'name' field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None

        __original_name_is_none = name is None
        if name is None:
            # Missing name: fall back to the document root id, else mint a
            # blank-node id.  Only a user-supplied name rebases ``baseuri``.
            if docRoot is not None:
                name = docRoot
            else:
                name = "_:" + str(_uuid__.uuid4())
        if not __original_name_is_none:
            baseuri = name
        try:
            # 'symbols' is mandatory: loaded unconditionally.
            symbols = load_field(
                _doc.get("symbols"),
                uri_array_of_strtype_True_False_None,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'symbols' field is not valid because:",
                    SourceLine(_doc, "symbols", str),
                    [e],
                )
            )
        try:
            # 'type' is mandatory: loaded unconditionally.
            type = load_field(
                _doc.get("type"),
                typedsl_Enum_nameLoader_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'type' field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'label' field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'doc' field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                # Unknown keys containing ':' are kept as extension fields
                # (expanded to absolute URLs); any other unknown key is an
                # error and stops the scan.
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'InputEnumSchema'", None, _errors__)
        _constructed = cls(
            name=name,
            symbols=symbols,
            type=type,
            label=label,
            doc=doc,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object so later references by name resolve.
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize to a CommentedMap, preserving original line/column info.

        ``keys`` is the path from the document root to this node (used to
        look up the original node), ``inserted_line_info`` maps lines already
        occupied by emitted entries, and ``shift`` offsets lines to make room
        for insertions.
        """
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc is not None:
            # Carry over the original mapping's position.
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        if doc:
            # Walk keys in original document order so the output mirrors the
            # input's ordering and line placement.
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    # Slide past lines already occupied by
                                    # previously inserted entries.
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Fields below were not present in the original document (or not yet
        # emitted above); append them with synthesized positions.
        if self.name is not None and "name" not in r:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="name",
                val=r.get("name"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.symbols is not None and "symbols" not in r:
            u = save_relative_uri(
                self.symbols, str(self.name), True, None, relative_uris
            )
            r["symbols"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="symbols",
                val=r.get("symbols"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="type",
                val=r.get("type"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="label",
                val=r.get("label"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="doc",
                val=r.get("doc"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized (non-extension) keys for this schema type.
    attrs = frozenset(["name", "symbols", "type", "label", "doc"])
class InputArraySchema(ArraySchema, InputSchema):
    """CWL ``InputArraySchema`` binding.

    Built from a parsed YAML mapping via :meth:`fromDoc` and serialized back
    to a ``ruamel.yaml`` ``CommentedMap`` via :meth:`save`, which reuses the
    original document's line/column (``lc``) data so round-tripped output
    keeps the input's layout where possible.

    NOTE(review): this follows the repetitive schema-salad python-codegen
    pattern and is presumably machine-generated — prefer fixing the code
    generator over hand-editing this class; TODO confirm.
    """

    def __init__(
        self,
        items: Any,
        type: Any,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        name: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Default to an empty CommentedMap / fresh LoadingOptions so the
        # instance is always usable even when constructed directly.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.items = items
        self.type = type
        self.label = label
        self.doc = doc
        self.name = name

    def __eq__(self, other: Any) -> bool:
        """Value equality over the five schema fields (same-type only)."""
        if isinstance(other, InputArraySchema):
            return bool(
                self.items == other.items
                and self.type == other.type
                and self.label == other.label
                and self.doc == other.doc
                and self.name == other.name
            )
        return False

    def __hash__(self) -> int:
        # Keep __hash__ consistent with __eq__: hash the same five fields.
        return hash((self.items, self.type, self.label, self.doc, self.name))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InputArraySchema":
        """Construct an instance from a parsed document node.

        Field loads that fail are accumulated in ``_errors__`` and raised as
        a single nested ValidationException at the end, so the caller sees
        every invalid field at once rather than just the first.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Re-attach the source line/column data to the shallow copy so
            # SourceLine-based error messages point at the original YAML.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'name' field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None

        __original_name_is_none = name is None
        if name is None:
            # Missing name: fall back to the document root id, else mint a
            # blank-node id.  Only a user-supplied name rebases ``baseuri``.
            if docRoot is not None:
                name = docRoot
            else:
                name = "_:" + str(_uuid__.uuid4())
        if not __original_name_is_none:
            baseuri = name
        try:
            # 'items' is mandatory: loaded unconditionally.
            items = load_field(
                _doc.get("items"),
                uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'items' field is not valid because:",
                    SourceLine(_doc, "items", str),
                    [e],
                )
            )
        try:
            # 'type' is mandatory: loaded unconditionally.
            type = load_field(
                _doc.get("type"),
                typedsl_Array_nameLoader_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'type' field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'label' field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'doc' field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                # Unknown keys containing ':' are kept as extension fields
                # (expanded to absolute URLs); any other unknown key is an
                # error and stops the scan.
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'InputArraySchema'", None, _errors__)
        _constructed = cls(
            items=items,
            type=type,
            label=label,
            doc=doc,
            name=name,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object so later references by name resolve.
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize to a CommentedMap, preserving original line/column info.

        ``keys`` is the path from the document root to this node (used to
        look up the original node), ``inserted_line_info`` maps lines already
        occupied by emitted entries, and ``shift`` offsets lines to make room
        for insertions.
        """
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc is not None:
            # Carry over the original mapping's position.
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        if doc:
            # Walk keys in original document order so the output mirrors the
            # input's ordering and line placement.
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    # Slide past lines already occupied by
                                    # previously inserted entries.
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Fields below were not present in the original document (or not yet
        # emitted above); append them with synthesized positions.
        if self.name is not None and "name" not in r:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="name",
                val=r.get("name"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.items is not None and "items" not in r:
            u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris)
            r["items"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="items",
                val=r.get("items"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="type",
                val=r.get("type"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="label",
                val=r.get("label"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc,
                top=False,
                base_url=str(self.name),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="doc",
                val=r.get("doc"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized (non-extension) keys for this schema type.
    attrs = frozenset(["items", "type", "label", "doc", "name"])
inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class OutputRecordField(RecordField, FieldBase, OutputFormat): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> 
"OutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + 
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + 
base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + 
r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] + ) + + +class OutputRecordSchema(RecordSchema, OutputSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and 
self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", 
str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in 
doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, 
+ line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class OutputEnumSchema(EnumSchema, OutputSchema): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = 
label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputEnumSchema'", None, _errors__) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + 
else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, 
inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class OutputArraySchema(ArraySchema, OutputSchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = 
items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( 
+ "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + 
line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + 
shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class InputParameter(Parameter, InputFormat, LoadContents): + pass + + +class OutputParameter(Parameter, OutputFormat): + pass + + +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled + before executing a process. 
See [`Process.hints`](#process) and + [`Process.requirements`](#process). + + Process requirements are the primary mechanism for specifying extensions to + the CWL core specification. + + """ + + pass + + +class Process(Identified, Labeled, Documented): + """ + + The base executable type in CWL is the `Process` object defined by the + document. Note that the `Process` object is abstract and cannot be + directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. + If this requirement is not present, the workflow platform must not perform expression + interpolation. + + """ + + def __init__( + self, + expressionLib: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InlineJavascriptRequirement" + self.expressionLib = expressionLib + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InlineJavascriptRequirement): + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.expressionLib)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InlineJavascriptRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InlineJavascriptRequirement": + raise ValidationException("Not a InlineJavascriptRequirement") + + if "expressionLib" in _doc: + try: + expressionLib = load_field( + 
_doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'expressionLib' field is not valid because:", + SourceLine(_doc, "expressionLib", str), + [e], + ) + ) + else: + expressionLib = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) + _constructed = cls( + expressionLib=expressionLib, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InlineJavascriptRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + 
line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.expressionLib is not None and "expressionLib" not in r: + r["expressionLib"] = save( + self.expressionLib, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expressionLib", + val=r.get("expressionLib"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "expressionLib"]) + + +class CommandInputSchema(Saveable): + pass + + +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when + interpreting the `inputs` and `outputs` fields. When a `type` field + contains a IRI, the implementation must check if the type is defined in + `schemaDefs` and use that definition. If the type is not found in + `schemaDefs`, it is an error. 
The entries in `schemaDefs` must be + processed in the order listed such that later schema definitions may refer + to earlier schema definitions. + + - **Type definitions are allowed for `enum` and `record` types only.** + - Type definitions may be shared by defining them in a file and then + `$include`-ing them in the `types` field. + - A file can contain a list of type definitions + + """ + + def __init__( + self, + types: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SchemaDefRequirement" + self.types = types + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.types)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SchemaDefRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SchemaDefRequirement": + raise ValidationException("Not a SchemaDefRequirement") + + try: + types = load_field( + _doc.get("types"), + array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'types' field is not valid because:", + SourceLine(_doc, "types", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", 
loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SchemaDefRequirement'", None, _errors__) + _constructed = cls( + types=types, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SchemaDefRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = 
saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.types is not None and "types" not in r: + r["types"] = save( + self.types, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="types", + val=r.get("types"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "types"]) + + +class SecondaryFileSchema(Saveable): + """ + Secondary files are specified using the following micro-DSL for secondary files: + + * If the value is a string, it is transformed to an object with two fields + `pattern` and `required` + * By default, the value of `required` is `null` + (this indicates default behavior, which may be based on the context) + * If the value ends with a question mark `?` the question mark is + stripped off and the value of the field `required` is set to `False` + * The remaining value is assigned to the field `pattern` + + For implementation details and examples, please see + [this section](SchemaSalad.html#Domain_Specific_Language_for_secondary_files) + in the Schema Salad specification. 
+ + """ + + def __init__( + self, + pattern: Any, + required: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.pattern = pattern + self.required = required + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SecondaryFileSchema): + return bool( + self.pattern == other.pattern and self.required == other.required + ) + return False + + def __hash__(self) -> int: + return hash((self.pattern, self.required)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SecondaryFileSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + pattern = load_field( + _doc.get("pattern"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'pattern' field is not valid because:", + SourceLine(_doc, "pattern", str), + [e], + ) + ) + if "required" in _doc: + try: + required = load_field( + _doc.get("required"), + union_of_None_type_or_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'required' field is not valid because:", + SourceLine(_doc, "required", str), + [e], + ) + ) + else: + required = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid 
field `{}`, expected one of: `pattern`, `required`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SecondaryFileSchema'", None, _errors__) + _constructed = cls( + pattern=pattern, + required=required, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + 
min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.pattern is not None and "pattern" not in r: + r["pattern"] = save( + self.pattern, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pattern", + val=r.get("pattern"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.required is not None and "required" not in r: + r["required"] = save( + self.required, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="required", + val=r.get("required"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["pattern", "required"]) + + +class LoadListingRequirement(ProcessRequirement): + """ + Specify the desired behavior for loading the `listing` field of + a Directory object for use by expressions. 
+ + """ + + def __init__( + self, + loadListing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "LoadListingRequirement" + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, LoadListingRequirement): + return bool( + self.class_ == other.class_ and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.loadListing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "LoadListingRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "LoadListingRequirement": + raise ValidationException("Not a LoadListingRequirement") + + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise 
ValidationException( + "Trying 'LoadListingRequirement'", None, _errors__ + ) + _constructed = cls( + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "LoadListingRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if 
self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "loadListing"]) + + +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment + by the workflow platform when executing the command line tool. May be the + result of executing an expression, such as getting a parameter from input. 
+ + """ + + def __init__( + self, + envName: Any, + envValue: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.envName = envName + self.envValue = envValue + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvironmentDef): + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) + return False + + def __hash__(self) -> int: + return hash((self.envName, self.envValue)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvironmentDef": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + envName = load_field( + _doc.get("envName"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'envName' field is not valid because:", + SourceLine(_doc, "envName", str), + [e], + ) + ) + try: + envValue = load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'envValue' field is not valid because:", + SourceLine(_doc, "envValue", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if 
_errors__: + raise ValidationException("Trying 'EnvironmentDef'", None, _errors__) + _constructed = cls( + envName=envName, + envValue=envValue, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.envName is not None and 
"envName" not in r: + r["envName"] = save( + self.envName, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envName", + val=r.get("envName"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.envValue is not None and "envValue" not in r: + r["envValue"] = save( + self.envValue, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envValue", + val=r.get("envValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["envName", "envValue"]) + + +class CommandLineBinding(InputBinding): + """ + + When listed under `inputBinding` in the input schema, the term + "value" refers to the corresponding value in the input object. For + binding objects listed in `CommandLineTool.arguments`, the term "value" + refers to the effective value after evaluating `valueFrom`. + + The binding behavior when building the command line depends on the data + type of the value. If there is a mismatch between the type described by + the input schema and the effective value, such as resulting from an + expression evaluation, an implementation must use the data type of the + effective value. + + - **string**: Add `prefix` and the string to the command line. + + - **number**: Add `prefix` and decimal representation to command line. 
+ + - **boolean**: If true, add `prefix` to the command line. If false, add + nothing. + + - **File**: Add `prefix` and the value of + [`File.path`](#File) to the command line. + + - **Directory**: Add `prefix` and the value of + [`Directory.path`](#Directory) to the command line. + + - **array**: If `itemSeparator` is specified, add `prefix` and the join + the array into a single string with `itemSeparator` separating the + items. Otherwise, first add `prefix`, then recursively process + individual elements. + If the array is empty, it does not add anything to command line. + + - **object**: Add `prefix` only, and recursively add object fields for + which `inputBinding` is specified. + + - **null**: Add nothing. + + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + position: Optional[Any] = None, + prefix: Optional[Any] = None, + separate: Optional[Any] = None, + itemSeparator: Optional[Any] = None, + valueFrom: Optional[Any] = None, + shellQuote: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBinding): + return bool( + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix + and self.separate == other.separate + and self.itemSeparator == other.itemSeparator + and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote + ) + return False + + def __hash__(self) -> int: + return 
hash( + ( + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "position" in _doc: + try: + position = load_field( + _doc.get("position"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'position' field is not valid because:", + SourceLine(_doc, "position", str), + [e], + ) + ) + else: + position = None + if "prefix" in _doc: + try: + prefix = load_field( + _doc.get("prefix"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'prefix' field is not valid because:", + SourceLine(_doc, "prefix", str), + [e], + ) + ) + else: + prefix = None + if "separate" in _doc: + try: + separate = load_field( + _doc.get("separate"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'separate' field is not valid because:", + SourceLine(_doc, "separate", str), + [e], + ) + ) + else: + separate = None + if "itemSeparator" in _doc: + try: + itemSeparator = load_field( + _doc.get("itemSeparator"), + 
union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'itemSeparator' field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [e], + ) + ) + else: + itemSeparator = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'valueFrom' field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + if "shellQuote" in _doc: + try: + shellQuote = load_field( + _doc.get("shellQuote"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'shellQuote' field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [e], + ) + ) + else: + shellQuote = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, 
+ inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.position is not None and "position" not in r: + r["position"] = save( + self.position, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="position", + val=r.get("position"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.prefix is not None and "prefix" not in r: + r["prefix"] = save( + self.prefix, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="prefix", + val=r.get("prefix"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.separate is not None and "separate" not in r: + r["separate"] = save( + self.separate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="separate", + val=r.get("separate"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.itemSeparator is not None and "itemSeparator" not in r: + r["itemSeparator"] = save( + self.itemSeparator, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="itemSeparator", + val=r.get("itemSeparator"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + 
shift=shift, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.shellQuote is not None and "shellQuote" not in r: + r["shellQuote"] = save( + self.shellQuote, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="shellQuote", + val=r.get("shellQuote"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "loadContents", + "position", + "prefix", + "separate", + "itemSeparator", + "valueFrom", + "shellQuote", + ] + ) + + +class CommandOutputBinding(LoadContents): + """ + Describes how to generate an output parameter based on the files produced + by a CommandLineTool. 
+ + The output parameter value is generated by applying these operations in the + following order: + + - glob + - loadContents + - outputEval + - secondaryFiles + + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + glob: Optional[Any] = None, + outputEval: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.loadListing = loadListing + self.glob = glob + self.outputEval = outputEval + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputBinding): + return bool( + self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.glob == other.glob + and self.outputEval == other.outputEval + ) + return False + + def __hash__(self) -> int: + return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, 
+ baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "glob" in _doc: + try: + glob = load_field( + _doc.get("glob"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'glob' field is not valid because:", + SourceLine(_doc, "glob", str), + [e], + ) + ) + else: + glob = None + if "outputEval" in _doc: + try: + outputEval = load_field( + _doc.get("outputEval"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputEval' field is not valid because:", + SourceLine(_doc, "outputEval", str), + [e], + ) + ) + else: + outputEval = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandOutputBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + loadListing=loadListing, + glob=glob, + outputEval=outputEval, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys 
= copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + 
r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.glob is not None and "glob" not in r: + r["glob"] = save( + self.glob, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="glob", + val=r.get("glob"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputEval is not None and "outputEval" not in r: + r["outputEval"] = save( + self.outputEval, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputEval", + val=r.get("outputEval"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"]) + + +class CommandLineBindable(Saveable): + def __init__( + self, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = 
CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBindable): + return bool(self.inputBinding == other.inputBinding) + return False + + def __hash__(self) -> int: + return hash((self.inputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineBindable": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineBindable'", None, _errors__) + _constructed = cls( + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) 
+ + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["inputBinding"]) + + +class CommandInputRecordField(InputRecordField, CommandLineBindable): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( 
+ cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + 
else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = 
None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputRecordField'", None, _errors__ + ) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if 
relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + 
self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + 
r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + 
attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + "inputBinding", + ] + ) + + +class CommandInputRecordSchema( + InputRecordSchema, CommandInputSchema, CommandLineBindable +): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, 
"name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + 
extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, 
+ inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + 
self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.name, + self.symbols, + self.type, + self.label, + self.doc, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + 
_doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ + ) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + inputBinding=inputBinding, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + 
r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc", "inputBinding"]) + + +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable +): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + 
(self.items, self.type, self.label, self.doc, self.name, self.inputBinding) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field 
is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info 
is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + 
val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) 
+ + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandOutputRecordField(OutputRecordField): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + 
_doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + 
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputBinding' field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise 
ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + 
key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + 
self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + 
r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "outputBinding", + ] + ) + + +class CommandOutputRecordSchema(OutputRecordSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: 
+ name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, 
+ label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, 
base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + 
val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class CommandOutputEnumSchema(OutputEnumSchema): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' 
field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise 
ValidationException( + "Trying 'CommandOutputEnumSchema'", None, _errors__ + ) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info 
= add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class CommandOutputArraySchema(OutputArraySchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except 
ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in 
k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, 
just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if 
isinstance(other, CommandInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + 
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + 
loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'default' field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, 
`inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandInputParameter'", None, _errors__) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + 
saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None 
and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + 
"label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class CommandOutputParameter(OutputParameter): + """ + An output parameter for a CommandLineTool. + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in 
_doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + 
doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputBinding' field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + 
secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if 
type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top 
refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "type", + "outputBinding", + ] + ) + + +class CommandLineTool(Process): + """ + This defines the schema of the CWL Command Line Tool Description document. + + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + baseCommand: Optional[Any] = None, + arguments: Optional[Any] = None, + stdin: Optional[Any] = None, + stderr: Optional[Any] = None, + stdout: Optional[Any] = None, + successCodes: Optional[Any] = None, + temporaryFailCodes: Optional[Any] = None, + permanentFailCodes: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineTool): + 
return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.baseCommand == other.baseCommand + and self.arguments == other.arguments + and self.stdin == other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "CommandLineTool": + raise ValidationException("Not a CommandLineTool") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = 
id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputs' field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputs' field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + 
baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'cwlVersion' field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'intent' field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + if "baseCommand" in _doc: + try: + baseCommand = load_field( + _doc.get("baseCommand"), + 
union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'baseCommand' field is not valid because:", + SourceLine(_doc, "baseCommand", str), + [e], + ) + ) + else: + baseCommand = None + if "arguments" in _doc: + try: + arguments = load_field( + _doc.get("arguments"), + union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'arguments' field is not valid because:", + SourceLine(_doc, "arguments", str), + [e], + ) + ) + else: + arguments = None + if "stdin" in _doc: + try: + stdin = load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'stdin' field is not valid because:", + SourceLine(_doc, "stdin", str), + [e], + ) + ) + else: + stdin = None + if "stderr" in _doc: + try: + stderr = load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'stderr' field is not valid because:", + SourceLine(_doc, "stderr", str), + [e], + ) + ) + else: + stderr = None + if "stdout" in _doc: + try: + stdout = load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'stdout' field is not valid because:", + SourceLine(_doc, "stdout", str), + [e], + ) + ) + else: + stdout = None + if "successCodes" in _doc: + try: + successCodes = load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the 'successCodes' field is not valid because:", + SourceLine(_doc, "successCodes", str), + [e], + ) + ) + else: + successCodes = None + if "temporaryFailCodes" in _doc: + try: + temporaryFailCodes = load_field( + _doc.get("temporaryFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'temporaryFailCodes' field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), + [e], + ) + ) + else: + temporaryFailCodes = None + if "permanentFailCodes" in _doc: + try: + permanentFailCodes = load_field( + _doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'permanentFailCodes' field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [e], + ) + ) + else: + permanentFailCodes = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineTool'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + 
successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "CommandLineTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if 
type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.intent is not None and "intent" not 
in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.baseCommand is not None and "baseCommand" not in r: + r["baseCommand"] = save( + self.baseCommand, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="baseCommand", + val=r.get("baseCommand"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.arguments is not None and "arguments" not in r: + r["arguments"] = save( + self.arguments, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="arguments", + val=r.get("arguments"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.stdin is not None and "stdin" not in r: + r["stdin"] = save( + self.stdin, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdin", + val=r.get("stdin"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.stderr is not None and "stderr" not in r: + r["stderr"] = save( + self.stderr, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, 
+ ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stderr", + val=r.get("stderr"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.stdout is not None and "stdout" not in r: + r["stdout"] = save( + self.stdout, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdout", + val=r.get("stdout"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.successCodes is not None and "successCodes" not in r: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="successCodes", + val=r.get("successCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: + r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="temporaryFailCodes", + val=r.get("temporaryFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.permanentFailCodes is not None and "permanentFailCodes" not in r: + r["permanentFailCodes"] = save( + self.permanentFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="permanentFailCodes", + val=r.get("permanentFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", + ] + ) + + +class DockerRequirement(ProcessRequirement): + """ + Indicates that a workflow component should be run in a + [Docker](https://docker.com) or Docker-compatible (such as + [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and + specifies how to fetch or build the image. + + If a CommandLineTool lists `DockerRequirement` under + `hints` (or `requirements`), it may (or must) be run in the specified Docker + container. + + The platform must first acquire or install the correct Docker image as + specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. + + The platform must execute the tool in the container using `docker run` with + the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output + directory through the use of volume bind mounts. The platform should rewrite + file paths in the input object to correspond to the Docker bind mounted + locations. 
That is, the platform should rewrite values in the parameter context + such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths + within the container. The platform must ensure that `runtime.outdir` and + `runtime.tmpdir` are distinct directories. + + When running a tool contained in Docker, the workflow platform must not + assume anything about the contents of the Docker container, such as the + presence or absence of specific software, except to assume that the + generated command line represents a valid command within the runtime + environment of the container. + + A container image may specify an + [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) + and/or + [CMD](https://docs.docker.com/engine/reference/builder/#cmd). + Command line arguments will be appended after all elements of + ENTRYPOINT, and will override all elements specified using CMD (in + other words, CMD is only used when the CommandLineTool definition + produces an empty command line). + + Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility + concerns of the implicit hidden execution point (For further discussion, see + [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). Portable + CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. + CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the + `requirements` section. + + ## Interaction with other requirements + + If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a + DockerRequirement, the environment variables must be provided to Docker + using `--env` or `--env-file` and interact with the container's preexisting + environment as defined by Docker. 
+ + """ + + def __init__( + self, + dockerPull: Optional[Any] = None, + dockerLoad: Optional[Any] = None, + dockerFile: Optional[Any] = None, + dockerImport: Optional[Any] = None, + dockerImageId: Optional[Any] = None, + dockerOutputDirectory: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "DockerRequirement" + self.dockerPull = dockerPull + self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DockerRequirement): + return bool( + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + self.dockerOutputDirectory, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "DockerRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "DockerRequirement": + raise ValidationException("Not a DockerRequirement") + + if "dockerPull" in _doc: + try: + dockerPull = load_field( + _doc.get("dockerPull"), + 
union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerPull' field is not valid because:", + SourceLine(_doc, "dockerPull", str), + [e], + ) + ) + else: + dockerPull = None + if "dockerLoad" in _doc: + try: + dockerLoad = load_field( + _doc.get("dockerLoad"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerLoad' field is not valid because:", + SourceLine(_doc, "dockerLoad", str), + [e], + ) + ) + else: + dockerLoad = None + if "dockerFile" in _doc: + try: + dockerFile = load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerFile' field is not valid because:", + SourceLine(_doc, "dockerFile", str), + [e], + ) + ) + else: + dockerFile = None + if "dockerImport" in _doc: + try: + dockerImport = load_field( + _doc.get("dockerImport"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerImport' field is not valid because:", + SourceLine(_doc, "dockerImport", str), + [e], + ) + ) + else: + dockerImport = None + if "dockerImageId" in _doc: + try: + dockerImageId = load_field( + _doc.get("dockerImageId"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerImageId' field is not valid because:", + SourceLine(_doc, "dockerImageId", str), + [e], + ) + ) + else: + dockerImageId = None + if "dockerOutputDirectory" in _doc: + try: + dockerOutputDirectory = load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the 'dockerOutputDirectory' field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), + [e], + ) + ) + else: + dockerOutputDirectory = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'DockerRequirement'", None, _errors__) + _constructed = cls( + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "DockerRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if 
hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.dockerPull is not None and "dockerPull" not in r: + r["dockerPull"] = save( + self.dockerPull, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerPull", + val=r.get("dockerPull"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerLoad is not None and "dockerLoad" not in r: + r["dockerLoad"] = save( + self.dockerLoad, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerLoad", + val=r.get("dockerLoad"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerFile is not None and "dockerFile" not in r: + r["dockerFile"] = save( + self.dockerFile, + top=False, + base_url=base_url, + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerFile", + val=r.get("dockerFile"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerImport is not None and "dockerImport" not in r: + r["dockerImport"] = save( + self.dockerImport, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImport", + val=r.get("dockerImport"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerImageId is not None and "dockerImageId" not in r: + r["dockerImageId"] = save( + self.dockerImageId, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImageId", + val=r.get("dockerImageId"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: + r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerOutputDirectory", + val=r.get("dockerOutputDirectory"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = 
self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", + ] + ) + + +class SoftwareRequirement(ProcessRequirement): + """ + A list of software packages that should be configured in the environment of + the defined process. + + """ + + def __init__( + self, + packages: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SoftwareRequirement" + self.packages = packages + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.packages)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwareRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SoftwareRequirement": + raise ValidationException("Not a SoftwareRequirement") + + try: + packages = load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'packages' field is not valid because:", + SourceLine(_doc, "packages", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( 
+ k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwareRequirement'", None, _errors__) + _constructed = cls( + packages=packages, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SoftwareRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] 
+ + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.packages is not None and "packages" not in r: + r["packages"] = save( + self.packages, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="packages", + val=r.get("packages"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Optional[Any] = None, + specs: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = 
None, + ) -> "SoftwarePackage": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + package = load_field( + _doc.get("package"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'package' field is not valid because:", + SourceLine(_doc, "package", str), + [e], + ) + ) + if "version" in _doc: + try: + version = load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'version' field is not valid because:", + SourceLine(_doc, "version", str), + [e], + ) + ) + else: + version = None + if "specs" in _doc: + try: + specs = load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'specs' field is not valid because:", + SourceLine(_doc, "specs", str), + [e], + ) + ) + else: + specs = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwarePackage'", None, _errors__) + _constructed = cls( + package=package, + version=version, + specs=specs, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = 
None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.package is not None and "package" not in r: + r["package"] = save( + self.package, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="package", + val=r.get("package"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + 
shift=shift, + ) + if self.version is not None and "version" not in r: + r["version"] = save( + self.version, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="version", + val=r.get("version"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.specs is not None and "specs" not in r: + u = save_relative_uri(self.specs, base_url, False, None, relative_uris) + r["specs"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="specs", + val=r.get("specs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["package", "version", "specs"]) + + +class Dirent(Saveable): + """ + Define a file or subdirectory that must be staged to a particular + place prior to executing the command line tool. May be the result + of executing an expression, such as building a configuration file + from a template. + + Usually files are staged within the [designated output directory](#Runtime_environment). + However, under certain circumstances, files may be staged at + arbitrary locations, see discussion for `entryname`. 
+ + """ + + def __init__( + self, + entry: Any, + entryname: Optional[Any] = None, + writable: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.entryname = entryname + self.entry = entry + self.writable = writable + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) + return False + + def __hash__(self) -> int: + return hash((self.entryname, self.entry, self.writable)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Dirent": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "entryname" in _doc: + try: + entryname = load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'entryname' field is not valid because:", + SourceLine(_doc, "entryname", str), + [e], + ) + ) + else: + entryname = None + try: + entry = load_field( + _doc.get("entry"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'entry' field is not valid because:", + SourceLine(_doc, "entry", str), + [e], + ) + ) + if "writable" in _doc: + try: + writable = load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( 
+ ValidationException( + "the 'writable' field is not valid because:", + SourceLine(_doc, "writable", str), + [e], + ) + ) + else: + writable = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Dirent'", None, _errors__) + _constructed = cls( + entryname=entryname, + entry=entry, + writable=writable, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.entryname is not None and "entryname" not in r: + r["entryname"] = save( + self.entryname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entryname", + val=r.get("entryname"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.entry is not None and "entry" not in r: + r["entry"] = save( + self.entry, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entry", + val=r.get("entry"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.writable is not None and "writable" not in r: + r["writable"] = save( + self.writable, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="writable", + val=r.get("writable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: 
+ if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["entryname", "entry", "writable"]) + + +class InitialWorkDirRequirement(ProcessRequirement): + """ + Define a list of files and subdirectories that must be staged by the workflow platform prior to executing the command line tool. + Normally files are staged within the designated output directory. However, when running inside containers, files may be staged at arbitrary locations, see discussion for [`Dirent.entryname`](#Dirent). Together with `DockerRequirement.dockerOutputDirectory` it is possible to control the locations of both input and output files when running in containers. + """ + + def __init__( + self, + listing: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InitialWorkDirRequirement" + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.listing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InitialWorkDirRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InitialWorkDirRequirement": + raise ValidationException("Not a InitialWorkDirRequirement") + + try: + listing = load_field( + _doc.get("listing"), + 
union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'listing' field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) + _constructed = cls( + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InitialWorkDirRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if 
getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.listing is not None and "listing" not in r: + r["listing"] = save( + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "listing"]) + + +class EnvVarRequirement(ProcessRequirement): + """ + Define a list of environment variables which will be set in the + execution environment of the tool. See `EnvironmentDef` for details. 
+ + """ + + def __init__( + self, + envDef: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "EnvVarRequirement" + self.envDef = envDef + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.envDef)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvVarRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "EnvVarRequirement": + raise ValidationException("Not a EnvVarRequirement") + + try: + envDef = load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'envDef' field is not valid because:", + SourceLine(_doc, "envDef", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvVarRequirement'", None, _errors__) + _constructed = cls( + envDef=envDef, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) 
+ return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "EnvVarRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.envDef is not None and "envDef" not in r: + r["envDef"] = save( + self.envDef, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + 
max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envDef", + val=r.get("envDef"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "envDef"]) + + +class ShellCommandRequirement(ProcessRequirement): + """ + Modify the behavior of CommandLineTool to generate a single string + containing a shell command line. Each item in the `arguments` list must + be joined into a string separated by single spaces and quoted to prevent + interpretation by the shell, unless `CommandLineBinding` for that argument + contains `shellQuote: false`. If `shellQuote: false` is specified, the + argument is joined into the command string without quoting, which allows + the use of shell metacharacters such as `|` for pipes. 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ShellCommandRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ShellCommandRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ShellCommandRequirement": + raise ValidationException("Not a ShellCommandRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if 
inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ShellCommandRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ResourceRequirement(ProcessRequirement): + """ + Specify basic hardware resource requirements. + + "min" is the minimum amount of a resource that must be reserved to + schedule a job. If "min" cannot be satisfied, the job should not + be run. 
+ + "max" is the maximum amount of a resource that the job shall be + allocated. If a node has sufficient resources, multiple jobs may + be scheduled on a single node provided each job's "max" resource + requirements are met. If a job attempts to exceed its resource + allocation, an implementation may deny additional resources, which + may result in job failure. + + If both "min" and "max" are specified, an implementation may + choose to allocate any amount between "min" and "max", with the + actual allocation provided in the `runtime` object. + + If "min" is specified but "max" is not, then "max" == "min" + If "max" is specified by "min" is not, then "min" == "max". + + It is an error if max < min. + + It is an error if the value of any of these fields is negative. + + If neither "min" nor "max" is specified for a resource, use the default values below. + + """ + + def __init__( + self, + coresMin: Optional[Any] = None, + coresMax: Optional[Any] = None, + ramMin: Optional[Any] = None, + ramMax: Optional[Any] = None, + tmpdirMin: Optional[Any] = None, + tmpdirMax: Optional[Any] = None, + outdirMin: Optional[Any] = None, + outdirMax: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ResourceRequirement" + self.coresMin = coresMin + self.coresMax = coresMax + self.ramMin = ramMin + self.ramMax = ramMax + self.tmpdirMin = tmpdirMin + self.tmpdirMax = tmpdirMax + self.outdirMin = outdirMin + self.outdirMax = outdirMax + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ResourceRequirement): + return bool( + self.class_ == other.class_ + and self.coresMin == other.coresMin + and self.coresMax == other.coresMax + and self.ramMin == 
other.ramMin + and self.ramMax == other.ramMax + and self.tmpdirMin == other.tmpdirMin + and self.tmpdirMax == other.tmpdirMax + and self.outdirMin == other.outdirMin + and self.outdirMax == other.outdirMax + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.coresMin, + self.coresMax, + self.ramMin, + self.ramMax, + self.tmpdirMin, + self.tmpdirMax, + self.outdirMin, + self.outdirMax, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ResourceRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ResourceRequirement": + raise ValidationException("Not a ResourceRequirement") + + if "coresMin" in _doc: + try: + coresMin = load_field( + _doc.get("coresMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'coresMin' field is not valid because:", + SourceLine(_doc, "coresMin", str), + [e], + ) + ) + else: + coresMin = None + if "coresMax" in _doc: + try: + coresMax = load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'coresMax' field is not valid because:", + SourceLine(_doc, "coresMax", str), + [e], + ) + ) + else: + coresMax = None + if "ramMin" in _doc: + try: + ramMin = load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'ramMin' field is not valid because:", + SourceLine(_doc, "ramMin", str), + [e], + ) + ) + else: + ramMin = None + if "ramMax" in 
_doc: + try: + ramMax = load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'ramMax' field is not valid because:", + SourceLine(_doc, "ramMax", str), + [e], + ) + ) + else: + ramMax = None + if "tmpdirMin" in _doc: + try: + tmpdirMin = load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'tmpdirMin' field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [e], + ) + ) + else: + tmpdirMin = None + if "tmpdirMax" in _doc: + try: + tmpdirMax = load_field( + _doc.get("tmpdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'tmpdirMax' field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [e], + ) + ) + else: + tmpdirMax = None + if "outdirMin" in _doc: + try: + outdirMin = load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outdirMin' field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + ) + ) + else: + outdirMin = None + if "outdirMax" in _doc: + try: + outdirMax = load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outdirMax' field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + ) + ) + else: + outdirMax = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in 
cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ResourceRequirement'", None, _errors__) + _constructed = cls( + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ResourceRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + 
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (len(saved_val) == 1):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                # Record where this key/value landed so later writes
                                # avoid reusing the same output line.
                                # NOTE(review): add_kv is defined elsewhere in this
                                # module -- confirm it returns (max_len, inserted_line_info).
                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Fields set on the object but absent from the source document:
        # serialize each and register its placement via add_kv.
        if self.coresMin is not None and "coresMin" not in r:
            r["coresMin"] = save(
                self.coresMin,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="coresMin",
                val=r.get("coresMin"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.coresMax is not None and "coresMax" not in r:
            r["coresMax"] = save(
                self.coresMax,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="coresMax",
                val=r.get("coresMax"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.ramMin is not None and "ramMin" not in r:
            r["ramMin"] = save(
                self.ramMin,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="ramMin",
                val=r.get("ramMin"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.ramMax is not None and "ramMax" not in r:
            r["ramMax"] = save(
                self.ramMax,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="ramMax",
                val=r.get("ramMax"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.tmpdirMin is not None and "tmpdirMin" not in r:
            r["tmpdirMin"] = save(
                self.tmpdirMin,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="tmpdirMin",
                val=r.get("tmpdirMin"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.tmpdirMax is not None and "tmpdirMax" not in r:
            r["tmpdirMax"] = save(
                self.tmpdirMax,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="tmpdirMax",
                val=r.get("tmpdirMax"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.outdirMin is not None and "outdirMin" not in r:
            r["outdirMin"] = save(
                self.outdirMin,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="outdirMin",
                val=r.get("outdirMin"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.outdirMax is not None and "outdirMax" not in r:
            r["outdirMax"] = save(
                self.outdirMax,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="outdirMax",
                val=r.get("outdirMax"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Known field names; anything else in a document is an extension or an error.
    attrs = frozenset(
        [
            "class",
            "coresMin",
            "coresMax",
            "ramMin",
            "ramMax",
            "tmpdirMin",
            "tmpdirMax",
            "outdirMin",
            "outdirMax",
        ]
    )


class WorkReuse(ProcessRequirement):
    """
    For implementations that support reusing output from past work (on
    the assumption that same code and same input produce same
    results), control whether to enable or disable the reuse behavior
    for a particular tool or step (to accommodate situations where that
    assumption is incorrect). A reused step is not executed but
    instead returns the same output as the original execution.

    If `WorkReuse` is not specified, correct tools should assume it
    is enabled by default.
+ + """ + + def __init__( + self, + enableReuse: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "WorkReuse" + self.enableReuse = enableReuse + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkReuse): + return bool( + self.class_ == other.class_ and self.enableReuse == other.enableReuse + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.enableReuse)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkReuse": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "WorkReuse": + raise ValidationException("Not a WorkReuse") + + try: + enableReuse = load_field( + _doc.get("enableReuse"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'enableReuse' field is not valid because:", + SourceLine(_doc, "enableReuse", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `enableReuse`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkReuse'", None, _errors__) + _constructed = cls( + enableReuse=enableReuse, + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "WorkReuse" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.enableReuse is not None and "enableReuse" not in r: + r["enableReuse"] = save( + self.enableReuse, + top=False, + base_url=base_url, + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="enableReuse", + val=r.get("enableReuse"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "enableReuse"]) + + +class NetworkAccess(ProcessRequirement): + """ + Indicate whether a process requires outgoing IPv4/IPv6 network + access. Choice of IPv4 or IPv6 is implementation and site + specific, correct tools must support both. + + If `networkAccess` is false or not specified, tools must not + assume network access, except for localhost (the loopback device). + + If `networkAccess` is true, the tool must be able to make outgoing + connections to network resources. Resources may be on a private + subnet or the public Internet. However, implementations and sites + may apply their own security policies to restrict what is + accessible by the tool. + + Enabling network access does not imply a publicly routable IP + address or the ability to accept inbound connections. 
+ + """ + + def __init__( + self, + networkAccess: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "NetworkAccess" + self.networkAccess = networkAccess + + def __eq__(self, other: Any) -> bool: + if isinstance(other, NetworkAccess): + return bool( + self.class_ == other.class_ + and self.networkAccess == other.networkAccess + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.networkAccess)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "NetworkAccess": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "NetworkAccess": + raise ValidationException("Not a NetworkAccess") + + try: + networkAccess = load_field( + _doc.get("networkAccess"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'networkAccess' field is not valid because:", + SourceLine(_doc, "networkAccess", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `networkAccess`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'NetworkAccess'", None, _errors__) + _constructed = cls( + networkAccess=networkAccess, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "NetworkAccess" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.networkAccess is not None and "networkAccess" not in r: + r["networkAccess"] = save( + self.networkAccess, + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="networkAccess", + val=r.get("networkAccess"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "networkAccess"]) + + +class InplaceUpdateRequirement(ProcessRequirement): + """ + + If `inplaceUpdate` is true, then an implementation supporting this + feature may permit tools to directly update files with `writable: + true` in InitialWorkDirRequirement. That is, as an optimization, + files may be destructively modified in place as opposed to copied + and updated. + + An implementation must ensure that only one workflow step may + access a writable file at a time. It is an error if a file which + is writable by one workflow step file is accessed (for reading or + writing) by any other workflow step running independently. + However, a file which has been updated in a previous completed + step may be used as input to multiple steps, provided it is + read-only in every step. + + Workflow steps which modify a file must produce the modified file + as output. Downstream steps which further process the file must + use the output of previous steps, and not refer to a common input + (this is necessary for both ordering and correctness). + + Workflow authors should provide this in the `hints` section. The + intent of this feature is that workflows produce the same results + whether or not InplaceUpdateRequirement is supported by the + implementation, and this feature is primarily available as an + optimization for particular environments. 

    Users and implementers should be aware that workflows that
    destructively modify inputs may not be repeatable or reproducible.
    In particular, enabling this feature implies that WorkReuse should
    not be enabled.

    """

    def __init__(
        self,
        inplaceUpdate: Any,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.class_ = "InplaceUpdateRequirement"
        self.inplaceUpdate = inplaceUpdate

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, InplaceUpdateRequirement):
            return bool(
                self.class_ == other.class_
                and self.inplaceUpdate == other.inplaceUpdate
            )
        return False

    def __hash__(self) -> int:
        return hash((self.class_, self.inplaceUpdate))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InplaceUpdateRequirement":
        """Parse an InplaceUpdateRequirement node, collecting field errors."""
        _doc = copy.copy(doc)
        # Carry the ruamel line/column bookkeeping onto the shallow copy.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        if _doc.get("class") != "InplaceUpdateRequirement":
            raise ValidationException("Not a InplaceUpdateRequirement")

        try:
            # Unlike enableReuse/networkAccess this field is a plain bool,
            # not a bool-or-expression union.
            inplaceUpdate = load_field(
                _doc.get("inplaceUpdate"),
                booltype,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'inplaceUpdate' field is not valid because:",
                    SourceLine(_doc, "inplaceUpdate", str),
                    [e],
                )
            )
        # Unknown keys: namespaced keys become extensions; others are errors.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException(
                "Trying 'InplaceUpdateRequirement'", None, _errors__
            )
        _constructed = cls(
            inplaceUpdate=inplaceUpdate,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize to a CommentedMap, replaying source line/col positions."""
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        r["class"] = "InplaceUpdateRequirement"

        if doc:
            # Emit fields in source-document order.
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                # Bump past output lines already claimed.
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (len(saved_val) == 1):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Field set on the object but absent from the source document.
        if self.inplaceUpdate is not None and "inplaceUpdate" not in r:
            r["inplaceUpdate"] = save(
                self.inplaceUpdate,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="inplaceUpdate",
                val=r.get("inplaceUpdate"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class", "inplaceUpdate"])


class ToolTimeLimit(ProcessRequirement):
    """
    Set an upper limit on the execution time of a CommandLineTool.
    A CommandLineTool whose execution duration exceeds the time
    limit may be preemptively terminated and considered failed.
    May also be used by batch systems to make scheduling decisions.
    The execution duration excludes external operations, such as
    staging of files, pulling a docker image etc, and only counts
    wall-time for the execution of the command line itself.
+ + """ + + def __init__( + self, + timelimit: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ToolTimeLimit" + self.timelimit = timelimit + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ToolTimeLimit): + return bool( + self.class_ == other.class_ and self.timelimit == other.timelimit + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.timelimit)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ToolTimeLimit": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ToolTimeLimit": + raise ValidationException("Not a ToolTimeLimit") + + try: + timelimit = load_field( + _doc.get("timelimit"), + union_of_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'timelimit' field is not valid because:", + SourceLine(_doc, "timelimit", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `timelimit`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ToolTimeLimit'", None, _errors__) + _constructed = cls( + timelimit=timelimit, + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ToolTimeLimit" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.timelimit is not None and "timelimit" not in r: + r["timelimit"] = save( + self.timelimit, + top=False, + base_url=base_url, + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="timelimit", + val=r.get("timelimit"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "timelimit"]) + + +class ExpressionToolOutputParameter(OutputParameter): + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionToolOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + 
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "ExpressionToolOutputParameter":
        """Parse an ExpressionToolOutputParameter node, collecting field errors."""
        _doc = copy.copy(doc)
        # Carry the ruamel line/column bookkeeping onto the shallow copy.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "id" in _doc:
            try:
                id = load_field(
                    _doc.get("id"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'id' field is not valid because:",
                        SourceLine(_doc, "id", str),
                        [e],
                    )
                )
        else:
            id = None

        # Synthesize an id (docRoot or a blank-node uuid) when absent; only a
        # user-supplied id rebases subsequent relative URIs.
        __original_id_is_none = id is None
        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        if not __original_id_is_none:
            baseuri = id
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'label' field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "secondaryFiles" in _doc:
            try:
                secondaryFiles = load_field(
                    _doc.get("secondaryFiles"),
                    secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'secondaryFiles' field is not valid because:",
                        SourceLine(_doc, "secondaryFiles", str),
                        [e],
                    )
                )
        else:
            secondaryFiles = None
        if "streamable" in _doc:
            try:
                streamable = load_field(
                    _doc.get("streamable"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'streamable' field is not valid because:",
                        SourceLine(_doc, "streamable", str),
                        [e],
                    )
                )
        else:
            streamable = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'doc' field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        if "format" in _doc:
            try:
                format = load_field(
                    _doc.get("format"),
                    uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'format' field is not valid because:",
                        SourceLine(_doc, "format", str),
                        [e],
                    )
                )
        else:
            format = None
        # `type` is required, so it is loaded unconditionally.
        try:
            type = load_field(
                _doc.get("type"),
                typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'type' field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )
        # Unknown keys: namespaced keys become extensions; others are errors.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException(
                "Trying 'ExpressionToolOutputParameter'", None, _errors__
            )
        _constructed = cls(
            label=label,
            secondaryFiles=secondaryFiles,
            streamable=streamable,
            doc=doc,
            id=id,
            format=format,
            type=type,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object in the shared identifier index.
        loadingOptions.idx[id] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize to a CommentedMap, replaying source line/col positions."""
        if keys is None:
            keys = []
        r = CommentedMap()

        keys = copy.copy(keys)

        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc:
            # Identified parameters may be keyed by their fragment id in the
            # source document; descend into that sub-map when present.
            if self.id:
                temp_id = self.id
                if len(temp_id.split('#')) > 1:
                    temp_id = self.id.split("#")[1]
                if temp_id in doc:
                    keys.append(temp_id)
                    temp_doc = doc.get(temp_id)
                    if isinstance(temp_doc, CommentedMap):
                        doc = temp_doc

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        if doc:
            # Child fields are saved relative to this parameter's id when set.
            base_url_to_save = base_url
            if self.id:
                base_url_to_save = self.id
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                # Bump past output lines already claimed.
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url_to_save,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (len(saved_val) == 1):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Fields set on the object but absent from the source document.
        if self.id is not None and "id" not in r:
            u = save_relative_uri(self.id, base_url, True, None, relative_uris)
            r["id"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="id",
                val=r.get("id"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="label",
                val=r.get("label"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.secondaryFiles is not None and "secondaryFiles" not in r:
            r["secondaryFiles"] = save(
                self.secondaryFiles,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="secondaryFiles",
                val=r.get("secondaryFiles"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.streamable is not None and "streamable" not in r:
            r["streamable"] = save(
                self.streamable,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="streamable",
                val=r.get("streamable"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="doc",
                val=r.get("doc"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.format is not None and "format" not in r:
            u = save_relative_uri(self.format, str(self.id), True, None, relative_uris)
            r["format"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="format",
                val=r.get("format"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.type is not None and "type" not in r:
            r["type"] = save(
                self.type,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="type",
                val=r.get("type"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(
        ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"]
    )


class WorkflowInputParameter(InputParameter):
    def __init__(
        self,
        type: Any,
        label: Optional[Any] = None,
        secondaryFiles: Optional[Any] = None,
        streamable: Optional[Any] = None,
        doc: Optional[Any] = None,
        id: Optional[Any] = None,
        format: Optional[Any] = None,
        loadContents: Optional[Any] = None,
        loadListing: Optional[Any] = None,
        default: Optional[Any] = None,
        inputBinding: Optional[Any] = None,
        extension_fields:
Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], 
+ ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 
'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'default' field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_InputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( 
+ "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowInputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = 
{} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, 
+ shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + 
base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class ExpressionTool(Process): + """ + An ExpressionTool is a type of Process object that can be run by itself + or as a Workflow step. It executes a pure Javascript expression that has + access to the same input parameters as a workflow. It is meant to be used + sparingly as a way to isolate complex Javascript expressions that need to + operate on input data and produce some result; perhaps just a + rearrangement of the inputs. No Docker software container is required + or allowed. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + expression: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "ExpressionTool" + self.expression = expression + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.expression == other.expression + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.expression, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ExpressionTool": + raise 
ValidationException("Not a ExpressionTool") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputs' field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_ExpressionToolOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputs' field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + 
"the 'cwlVersion' field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'intent' field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + try: + expression = load_field( + _doc.get("expression"), + ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'expression' field is not valid because:", + SourceLine(_doc, "expression", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `expression`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ExpressionTool'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + expression=expression, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = 
CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ExpressionTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + 
old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.expression is not None and "expression" not in r: + r["expression"] = save( + self.expression, + top=False, + base_url=str(self.id), + 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expression", + val=r.get("expression"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "expression", + ] + ) + + +class WorkflowOutputParameter(OutputParameter): + """ + Describe an output parameter of a workflow. The parameter must be + connected to one or more parameters defined in the workflow that + will provide the value of the output parameter. It is legal to + connect a WorkflowInputParameter to a WorkflowOutputParameter. + + See [WorkflowStepInput](#WorkflowStepInput) for discussion of + `linkMerge` and `pickValue`. 
+ + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputSource: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.outputSource = outputSource + self.linkMerge = linkMerge + self.pickValue = pickValue + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.outputSource == other.outputSource + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.outputSource, + self.linkMerge, + self.pickValue, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = 
load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if 
"format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputSource" in _doc: + try: + outputSource = load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputSource' field is not valid because:", + SourceLine(_doc, "outputSource", str), + [e], + ) + ) + else: + outputSource = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'linkMerge' field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "pickValue" in _doc: + try: + pickValue = load_field( + _doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'pickValue' field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + ) + ) + else: + pickValue = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + 
SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `outputSource`, `linkMerge`, `pickValue`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + outputSource=outputSource, + linkMerge=linkMerge, + pickValue=pickValue, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] 
+ else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + 
self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputSource is not None and "outputSource" not in r: + u = save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) + r["outputSource"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="outputSource", + val=r.get("outputSource"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.pickValue is not None and "pickValue" not in r: + r["pickValue"] = save( + self.pickValue, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pickValue", + val=r.get("pickValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "outputSource", + "linkMerge", + "pickValue", + "type", 
+        ]
+    )
+
+
+class Sink(Saveable):
+    pass
+
+
+class WorkflowStepInput(Identified, Sink, LoadContents, Labeled):
+    """
+    The input of a workflow step connects an upstream parameter (from the
+    workflow inputs, or the outputs of other workflow steps) with the input
+    parameters of the process specified by the `run` field. Only input parameters
+    declared by the target process will be passed through at runtime to the process
+    though additional parameters may be specified (for use within `valueFrom`
+    expressions for instance) - unconnected or unused parameters do not represent an
+    error condition.
+
+    # Input object
+
+    A WorkflowStepInput object must contain an `id` field in the form
+    `#fieldname` or `#prefix/fieldname`.  When the `id` field contains a slash
+    `/` the field name consists of the characters following the final slash
+    (the prefix portion may contain one or more slashes to indicate scope).
+    This defines a field of the workflow step input object with the value of
+    the `source` parameter(s).
+
+    # Merging multiple inbound data links
+
+    To merge multiple inbound data links,
+    [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified
+    in the workflow or workflow step requirements.
+
+    If the sink parameter is an array, or named in a [workflow
+    scatter](#WorkflowStep) operation, there may be multiple inbound
+    data links listed in the `source` field.  The values from the
+    input links are merged depending on the method specified in the
+    `linkMerge` field.  If both `linkMerge` and `pickValue` are null
+    or not specified, and there is more than one element in the
+    `source` array, the default method is "merge_nested".
+
+    If both `linkMerge` and `pickValue` are null or not specified, and
+    there is only a single element in the `source`, then the input
+    parameter takes the scalar value from the single input link (it is
+    *not* wrapped in a single-list).
+ + * **merge_nested** + + The input must be an array consisting of exactly one entry for each + input link. If "merge_nested" is specified with a single link, the value + from the link must be wrapped in a single-item list. + + * **merge_flattened** + + 1. The source and sink parameters must be compatible types, or the source + type must be compatible with single element from the "items" type of + the destination array parameter. + 2. Source parameters which are arrays are concatenated. + Source parameters which are single element types are appended as + single elements. + + # Picking non-null values among inbound data links + + If present, `pickValue` specifies how to pick non-null values among inbound data links. + + `pickValue` is evaluated + 1. Once all source values from upstream step or parameters are available. + 2. After `linkMerge`. + 3. Before `scatter` or `valueFrom`. + + This is specifically intended to be useful in combination with + [conditional execution](#WorkflowStep), where several upstream + steps may be connected to a single input (`source` is a list), and + skipped steps produce null values. + + Static type checkers should check for type consistency after inferring what the type + will be after `pickValue` is applied, just as they do currently for `linkMerge`. + + * **first_non_null** + + For the first level of a list input, pick the first non-null element. The result is a scalar. + It is an error if there is no non-null element. Examples: + * `[null, x, null, y] -> x` + * `[null, [null], null, y] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: If-else pattern where the + value comes either from a conditional step or from a default or + fallback value. The conditional step(s) should be placed first in + the list. + + * **the_only_non_null** + + For the first level of a list input, pick the single non-null element. The result is a scalar. + It is an error if there is more than one non-null element. 
Examples: + + * `[null, x, null] -> x` + * `[null, x, null, y] -> Runtime Error` + * `[null, [null], null] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: Switch type patterns where developer considers + more than one active code path as a workflow error + (possibly indicating an error in writing `when` condition expressions). + + * **all_non_null** + + For the first level of a list input, pick all non-null values. + The result is a list, which may be empty. Examples: + + * `[null, x, null] -> [x]` + * `[x, null, y] -> [x, y]` + * `[null, [x], [null]] -> [[x], [null]]` + * `[null, null, null] -> []` + + *Intended use case*: It is valid to have more than one source, but + sources are conditional, so null sources (from skipped steps) + should be filtered out. + + """ + + def __init__( + self, + id: Optional[Any] = None, + source: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + label: Optional[Any] = None, + default: Optional[Any] = None, + valueFrom: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.source = source + self.linkMerge = linkMerge + self.pickValue = pickValue + self.loadContents = loadContents + self.loadListing = loadListing + self.label = label + self.default = default + self.valueFrom = valueFrom + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepInput): + return bool( + self.id == other.id + and self.source == other.source + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.loadContents == other.loadContents 
+ and self.loadListing == other.loadListing + and self.label == other.label + and self.default == other.default + and self.valueFrom == other.valueFrom + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.source, + self.linkMerge, + self.pickValue, + self.loadContents, + self.loadListing, + self.label, + self.default, + self.valueFrom, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepInput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "source" in _doc: + try: + source = load_field( + _doc.get("source"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'source' field is not valid because:", + SourceLine(_doc, "source", str), + [e], + ) + ) + else: + source = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'linkMerge' field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "pickValue" in _doc: + try: + pickValue = 
load_field( + _doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'pickValue' field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + ) + ) + else: + pickValue = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'default' field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: 
+ _errors__.append( + ValidationException( + "the 'valueFrom' field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `pickValue`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__) + _constructed = cls( + id=id, + source=source, + linkMerge=linkMerge, + pickValue=pickValue, + loadContents=loadContents, + loadListing=loadListing, + label=label, + default=default, + valueFrom=valueFrom, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in 
self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.source is not None and "source" not in r: + u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) + r["source"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="source", + val=r.get("source"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( 
+ self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.pickValue is not None and "pickValue" not in r: + r["pickValue"] = save( + self.pickValue, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pickValue", + val=r.get("pickValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + 
base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "source", + "linkMerge", + "pickValue", + "loadContents", + "loadListing", + "label", + "default", + "valueFrom", + ] + ) + + +class WorkflowStepOutput(Identified): + """ + Associate an output parameter of the underlying process with a workflow + parameter. 
The workflow parameter (given in the `id` field) may be used
+    as a `source` to connect with input parameters of other workflow steps, or
+    with an output parameter of the process.
+
+    A unique identifier for this workflow output parameter.  This is
+    the identifier to use in the `source` field of `WorkflowStepInput`
+    to connect the output value to downstream parameters.
+
+    """
+
+    def __init__(
+        self,
+        id: Optional[Any] = None,
+        extension_fields: Optional[Dict[str, Any]] = None,
+        loadingOptions: Optional[LoadingOptions] = None,
+    ) -> None:
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.id = id
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, WorkflowStepOutput):
+            return bool(self.id == other.id)
+        return False
+
+    def __hash__(self) -> int:
+        return hash((self.id))
+
+    @classmethod
+    def fromDoc(
+        cls,
+        doc: Any,
+        baseuri: str,
+        loadingOptions: LoadingOptions,
+        docRoot: Optional[str] = None,
+    ) -> "WorkflowStepOutput":
+        _doc = copy.copy(doc)
+        if hasattr(doc, "lc"):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        _errors__ = []
+        if "id" in _doc:
+            try:
+                id = load_field(
+                    _doc.get("id"),
+                    uri_union_of_None_type_or_strtype_True_False_None,
+                    baseuri,
+                    loadingOptions,
+                )
+            except ValidationException as e:
+                _errors__.append(
+                    ValidationException(
+                        "the 'id' field is not valid because:",
+                        SourceLine(_doc, "id", str),
+                        [e],
+                    )
+                )
+        else:
+            id = None
+
+        __original_id_is_none = id is None
+        if id is None:
+            if docRoot is not None:
+                id = docRoot
+            else:
+                id = "_:" + str(_uuid__.uuid4())
+        if not __original_id_is_none:
+            baseuri = id
+        extension_fields: Dict[str, Any] = {}
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(
+                        k, "", loadingOptions, scoped_id=False, vocab_term=False
+                    )
+
extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__) + _constructed = cls( + id=id, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + 
keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["id"]) + + +class WorkflowStep(Identified, Labeled, Documented): + """ + A workflow step is an executable element of a workflow. It specifies the + underlying process implementation (such as `CommandLineTool` or another + `Workflow`) in the `run` field and connects the input and output parameters + of the underlying process to workflow parameters. + + # Scatter/gather + + To use scatter/gather, + [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + A "scatter" operation specifies that the associated workflow step or + subworkflow should execute separately over a list of input elements. Each + job making up a scatter operation is independent and may be executed + concurrently. + + The `scatter` field specifies one or more input parameters which will be + scattered. 
An input parameter may be listed more than once. The declared + type of each input parameter implicitly becomes an array of items of the + input parameter type. If a parameter is listed more than once, it becomes + a nested array. As a result, upstream parameters which are connected to + scattered parameters must be arrays. + + All output parameter types are also implicitly wrapped in arrays. Each job + in the scatter results in an entry in the output array. + + If any scattered parameter runtime value is an empty array, all outputs are + set to empty arrays and no work is done for the step, according to + applicable scattering rules. + + If `scatter` declares more than one input parameter, `scatterMethod` + describes how to decompose the input into a discrete set of jobs. + + * **dotproduct** specifies that each of the input arrays are aligned and one + element taken from each array to construct each job. It is an error + if all input arrays are not the same length. + + * **nested_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output must be nested arrays for each level of scattering, in the + order that the input arrays are listed in the `scatter` field. + + * **flat_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output arrays must be flattened to a single level, but otherwise listed in the + order that the input arrays are listed in the `scatter` field. + + # Conditional execution (Optional) + + Conditional execution makes execution of a step conditional on an + expression. A step that is not executed is "skipped". A skipped + step produces `null` for all output parameters. + + The condition is evaluated after `scatter`, using the input object + of each individual scatter job. This means over a set of scatter + jobs, some may be executed and some may be skipped. 
When the + results are gathered, skipped steps must be `null` in the output + arrays. + + The `when` field controls conditional execution. This is an + expression that must be evaluated with `inputs` bound to the step + input object (or individual scatter job), and returns a boolean + value. It is an error if this expression returns a value other + than `true` or `false`. + + Conditionals in CWL are an optional feature and are not required + to be implemented by all consumers of CWL documents. An + implementation that does not support conditionals must return a + fatal error when attempting to execute a workflow that uses + conditional constructs the implementation does not support. + + # Subworkflows + + To specify a nested workflow as part of a workflow step, + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be + specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as + a subworkflow (recursive workflows are not allowed). 
+ + """ + + def __init__( + self, + in_: Any, + out: Any, + run: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + when: Optional[Any] = None, + scatter: Optional[Any] = None, + scatterMethod: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.when = when + self.scatter = scatter + self.scatterMethod = scatterMethod + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStep): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + and self.when == other.when + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.when, + self.scatter, + self.scatterMethod, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStep": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + 
uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + in_ = load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'in' field is not valid because:", + SourceLine(_doc, "in", str), + [e], + ) + ) + try: + out = load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'out' field is not valid because:", + SourceLine(_doc, "out", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + try: + run = load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None, + subscope_baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'run' field is not valid because:", + SourceLine(_doc, "run", str), + [e], + ) + ) + if "when" in _doc: + try: + when = load_field( + _doc.get("when"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'when' field is not valid because:", + SourceLine(_doc, "when", str), + [e], + ) + ) + else: + when = None + if "scatter" 
in _doc: + try: + scatter = load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'scatter' field is not valid because:", + SourceLine(_doc, "scatter", str), + [e], + ) + ) + else: + scatter = None + if "scatterMethod" in _doc: + try: + scatterMethod = load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'scatterMethod' field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [e], + ) + ) + else: + scatterMethod = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`, `scatter`, `scatterMethod`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStep'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + when=when, + scatter=scatter, + scatterMethod=scatterMethod, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + 
+ doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + 
cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.in_ is not None and "in" not in r: + r["in"] = save( + self.in_, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="in", + val=r.get("in"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.out is not None and "out" not in r: + u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) + r["out"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="out", + val=r.get("out"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.run is not None and "run" not in r: + u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) + r["run"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="run", + val=r.get("run"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.when is not None and "when" not in r: + r["when"] = save( + self.when, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="when", + val=r.get("when"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.scatter is not None and "scatter" not in r: + u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) + r["scatter"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatter", + val=r.get("scatter"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.scatterMethod is not None and "scatterMethod" not in r: + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) + r["scatterMethod"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatterMethod", + val=r.get("scatterMethod"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "in", + "out", + "requirements", + "hints", + "run", + "when", + "scatter", + "scatterMethod", + ] + ) + + +class Workflow(Process): + """ + A workflow describes a set of **steps** and the **dependencies** between + those steps. When a step produces output that will be consumed by a + second step, the first step is a dependency of the second step. + + When there is a dependency, the workflow engine must execute the preceding + step and wait for it to successfully produce output before executing the + dependent step. If two steps are defined in the workflow graph that + are not directly or indirectly dependent, these steps are **independent**, + and may execute in any order or execute concurrently. A workflow is + complete when all steps have been executed. + + Dependencies between parameters are expressed using the `source` + field on [workflow step input parameters](#WorkflowStepInput) and + `outputSource` field on [workflow output + parameters](#WorkflowOutputParameter). + + The `source` field on each workflow step input parameter expresses + the data links that contribute to the value of the step input + parameter (the "sink"). 
A workflow step can only begin execution + when every data link connected to a step has been fulfilled. + + The `outputSource` field on each workflow step input parameter + expresses the data links that contribute to the value of the + workflow output parameter (the "sink"). Workflow execution cannot + complete successfully until every data link connected to an output + parameter has been fulfilled. + + ## Workflow success and failure + + A completed step must result in one of `success`, `temporaryFailure` or + `permanentFailure` states. An implementation may choose to retry a step + execution which resulted in `temporaryFailure`. An implementation may + choose to either continue running other steps of a workflow, or terminate + immediately upon `permanentFailure`. + + * If any step of a workflow execution results in `permanentFailure`, then + the workflow status is `permanentFailure`. + + * If one or more steps result in `temporaryFailure` and all other steps + complete `success` or are not executed, then the workflow status is + `temporaryFailure`. + + * If all workflow steps are executed and complete with `success`, then the + workflow status is `success`. + + # Extensions + + [ScatterFeatureRequirement](#ScatterFeatureRequirement) and + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are + available as standard [extensions](#Extensions_and_Metadata) to core + workflow semantics. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + steps: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "Workflow" + self.steps = steps + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Workflow): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.steps == other.steps + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.steps, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Workflow": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Workflow": + raise ValidationException("Not a Workflow") + + if "id" in 
_doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputs' field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_WorkflowOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputs' field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + 
"the 'cwlVersion' field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'intent' field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + try: + steps = load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_WorkflowStepLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'steps' field is not valid because:", + SourceLine(_doc, "steps", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `steps`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Workflow'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + steps=steps, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + 
keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Workflow" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + 
shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.steps is not None and "steps" not in r: + r["steps"] = save( + self.steps, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + 
shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="steps", + val=r.get("steps"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "steps", + ] + ) + + +class SubworkflowFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support nested workflows in + the `run` field of [WorkflowStep](#WorkflowStep). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SubworkflowFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SubworkflowFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SubworkflowFeatureRequirement": + raise ValidationException("Not a SubworkflowFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k 
not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'SubworkflowFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SubworkflowFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + 
len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ScatterFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support the `scatter` and + `scatterMethod` fields of [WorkflowStep](#WorkflowStep). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ScatterFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ScatterFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ScatterFeatureRequirement": + raise ValidationException("Not a ScatterFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ScatterFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = 
add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links + listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "MultipleInputFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "MultipleInputFeatureRequirement": + raise ValidationException("Not a MultipleInputFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + 
else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "MultipleInputFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the `valueFrom` field + of [WorkflowStepInput](#WorkflowStepInput). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "StepInputExpressionRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "StepInputExpressionRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "StepInputExpressionRequirement": + raise ValidationException("Not a StepInputExpressionRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected 
one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'StepInputExpressionRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "StepInputExpressionRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + 
max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class OperationInputParameter(InputParameter): + """ + Describe an input parameter of an operation. + + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OperationInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + 
self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OperationInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not 
valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'default' field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = 
load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'OperationInputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + 
keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), 
+ relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + 
new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), 
+ cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + ] + ) + + +class OperationOutputParameter(OutputParameter): + """ + Describe an output parameter of an operation. + + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OperationOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, 
+ docRoot: Optional[str] = None, + ) -> "OperationOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, 
+ baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'OperationOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + 
base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + 
) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) + + +class Operation(Process): + """ + This record describes an abstract operation. It is a potential + step of a workflow that has not yet been bound to a concrete + implementation. It specifies an input and output signature, but + does not provide enough information to be executed. An + implementation (or other tooling) may provide a means of binding + an Operation to a concrete process (such as Workflow, + CommandLineTool, or ExpressionTool) with a compatible signature. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "Operation" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Operation): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Operation": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Operation": + raise ValidationException("Not a Operation") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + 
uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_OperationInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputs' field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_OperationOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputs' field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + 
"the 'cwlVersion' field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'intent' field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Operation'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if 
isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Operation" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( 
+ old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + ] + ) + + +_vocab = { + "Any": "https://w3id.org/cwl/salad#Any", + "ArraySchema": 
"https://w3id.org/cwl/salad#ArraySchema", + "CWLType": "https://w3id.org/cwl/cwl#CWLType", + "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", + "CommandInputArraySchema": "https://w3id.org/cwl/cwl#CommandInputArraySchema", + "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", + "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter", + "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", + "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", + "CommandInputSchema": "https://w3id.org/cwl/cwl#CommandInputSchema", + "CommandLineBindable": "https://w3id.org/cwl/cwl#CommandLineBindable", + "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", + "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", + "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", + "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", + "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema", + "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", + "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", + "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", + "Directory": "https://w3id.org/cwl/cwl#Directory", + "Dirent": "https://w3id.org/cwl/cwl#Dirent", + "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", + "Documented": "https://w3id.org/cwl/salad#Documented", + "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", + "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", + "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", + "Expression": "https://w3id.org/cwl/cwl#Expression", + "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", + "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", + "ExpressionToolOutputParameter": 
"https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", + "FieldBase": "https://w3id.org/cwl/cwl#FieldBase", + "File": "https://w3id.org/cwl/cwl#File", + "IOSchema": "https://w3id.org/cwl/cwl#IOSchema", + "Identified": "https://w3id.org/cwl/cwl#Identified", + "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", + "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", + "InplaceUpdateRequirement": "https://w3id.org/cwl/cwl#InplaceUpdateRequirement", + "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", + "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", + "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", + "InputFormat": "https://w3id.org/cwl/cwl#InputFormat", + "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", + "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField", + "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", + "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", + "Labeled": "https://w3id.org/cwl/cwl#Labeled", + "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", + "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", + "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", + "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", + "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", + "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", + "Operation": "https://w3id.org/cwl/cwl#Operation", + "OperationInputParameter": "https://w3id.org/cwl/cwl#OperationInputParameter", + "OperationOutputParameter": "https://w3id.org/cwl/cwl#OperationOutputParameter", + "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", + "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", + "OutputFormat": "https://w3id.org/cwl/cwl#OutputFormat", + "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", + "OutputRecordField": 
"https://w3id.org/cwl/cwl#OutputRecordField", + "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", + "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", + "Parameter": "https://w3id.org/cwl/cwl#Parameter", + "PickValueMethod": "https://w3id.org/cwl/cwl#PickValueMethod", + "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", + "Process": "https://w3id.org/cwl/cwl#Process", + "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", + "RecordField": "https://w3id.org/cwl/salad#RecordField", + "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", + "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", + "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", + "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", + "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", + "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", + "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", + "Sink": "https://w3id.org/cwl/cwl#Sink", + "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", + "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", + "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", + "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", + "ToolTimeLimit": "https://w3id.org/cwl/cwl#ToolTimeLimit", + "WorkReuse": "https://w3id.org/cwl/cwl#WorkReuse", + "Workflow": "https://w3id.org/cwl/cwl#Workflow", + "WorkflowInputParameter": "https://w3id.org/cwl/cwl#WorkflowInputParameter", + "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", + "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", + "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", + "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", + "all_non_null": 
"https://w3id.org/cwl/cwl#PickValueMethod/all_non_null", + "array": "https://w3id.org/cwl/salad#array", + "boolean": "http://www.w3.org/2001/XMLSchema#boolean", + "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", + "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", + "double": "http://www.w3.org/2001/XMLSchema#double", + "draft-2": "https://w3id.org/cwl/cwl#draft-2", + "draft-3": "https://w3id.org/cwl/cwl#draft-3", + "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", + "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", + "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", + "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", + "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", + "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", + "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", + "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", + "enum": "https://w3id.org/cwl/salad#enum", + "first_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null", + "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", + "float": "http://www.w3.org/2001/XMLSchema#float", + "int": "http://www.w3.org/2001/XMLSchema#int", + "long": "http://www.w3.org/2001/XMLSchema#long", + "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", + "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", + "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", + "no_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing", + "null": "https://w3id.org/cwl/salad#null", + "record": "https://w3id.org/cwl/salad#record", + "shallow_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing", + "stderr": "https://w3id.org/cwl/cwl#stderr", + "stdin": "https://w3id.org/cwl/cwl#stdin", + "stdout": "https://w3id.org/cwl/cwl#stdout", + "string": "http://www.w3.org/2001/XMLSchema#string", + "the_only_non_null": 
"https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null", + "v1.0": "https://w3id.org/cwl/cwl#v1.0", + "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", + "v1.1": "https://w3id.org/cwl/cwl#v1.1", + "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", + "v1.2": "https://w3id.org/cwl/cwl#v1.2", + "v1.2.0-dev1": "https://w3id.org/cwl/cwl#v1.2.0-dev1", + "v1.2.0-dev2": "https://w3id.org/cwl/cwl#v1.2.0-dev2", + "v1.2.0-dev3": "https://w3id.org/cwl/cwl#v1.2.0-dev3", + "v1.2.0-dev4": "https://w3id.org/cwl/cwl#v1.2.0-dev4", + "v1.2.0-dev5": "https://w3id.org/cwl/cwl#v1.2.0-dev5", +} +_rvocab = { + "https://w3id.org/cwl/salad#Any": "Any", + "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", + "https://w3id.org/cwl/cwl#CWLType": "CWLType", + "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", + "https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", + "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", + "https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", + "https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", + "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", + "https://w3id.org/cwl/cwl#CommandInputSchema": "CommandInputSchema", + "https://w3id.org/cwl/cwl#CommandLineBindable": "CommandLineBindable", + "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", + "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", + "https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", + "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", + "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", + "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", + "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", + "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", + 
"https://w3id.org/cwl/cwl#Directory": "Directory", + "https://w3id.org/cwl/cwl#Dirent": "Dirent", + "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", + "https://w3id.org/cwl/salad#Documented": "Documented", + "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", + "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", + "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", + "https://w3id.org/cwl/cwl#Expression": "Expression", + "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", + "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", + "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", + "https://w3id.org/cwl/cwl#FieldBase": "FieldBase", + "https://w3id.org/cwl/cwl#File": "File", + "https://w3id.org/cwl/cwl#IOSchema": "IOSchema", + "https://w3id.org/cwl/cwl#Identified": "Identified", + "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", + "https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", + "https://w3id.org/cwl/cwl#InplaceUpdateRequirement": "InplaceUpdateRequirement", + "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", + "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", + "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", + "https://w3id.org/cwl/cwl#InputFormat": "InputFormat", + "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", + "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", + "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", + "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", + "https://w3id.org/cwl/cwl#Labeled": "Labeled", + "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", + "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", + "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", + "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", + 
"https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", + "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", + "https://w3id.org/cwl/cwl#Operation": "Operation", + "https://w3id.org/cwl/cwl#OperationInputParameter": "OperationInputParameter", + "https://w3id.org/cwl/cwl#OperationOutputParameter": "OperationOutputParameter", + "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", + "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", + "https://w3id.org/cwl/cwl#OutputFormat": "OutputFormat", + "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", + "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", + "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", + "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", + "https://w3id.org/cwl/cwl#Parameter": "Parameter", + "https://w3id.org/cwl/cwl#PickValueMethod": "PickValueMethod", + "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", + "https://w3id.org/cwl/cwl#Process": "Process", + "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", + "https://w3id.org/cwl/salad#RecordField": "RecordField", + "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", + "https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", + "https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", + "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", + "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", + "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", + "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", + "https://w3id.org/cwl/cwl#Sink": "Sink", + "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", + "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", + "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", + 
"https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", + "https://w3id.org/cwl/cwl#ToolTimeLimit": "ToolTimeLimit", + "https://w3id.org/cwl/cwl#WorkReuse": "WorkReuse", + "https://w3id.org/cwl/cwl#Workflow": "Workflow", + "https://w3id.org/cwl/cwl#WorkflowInputParameter": "WorkflowInputParameter", + "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", + "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", + "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", + "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", + "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null": "all_non_null", + "https://w3id.org/cwl/salad#array": "array", + "http://www.w3.org/2001/XMLSchema#boolean": "boolean", + "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", + "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", + "http://www.w3.org/2001/XMLSchema#double": "double", + "https://w3id.org/cwl/cwl#draft-2": "draft-2", + "https://w3id.org/cwl/cwl#draft-3": "draft-3", + "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", + "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", + "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", + "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", + "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", + "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", + "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", + "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", + "https://w3id.org/cwl/salad#enum": "enum", + "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null": "first_non_null", + "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", + "http://www.w3.org/2001/XMLSchema#float": "float", + "http://www.w3.org/2001/XMLSchema#int": "int", + "http://www.w3.org/2001/XMLSchema#long": "long", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": 
"merge_flattened", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", + "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", + "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing": "no_listing", + "https://w3id.org/cwl/salad#null": "null", + "https://w3id.org/cwl/salad#record": "record", + "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing": "shallow_listing", + "https://w3id.org/cwl/cwl#stderr": "stderr", + "https://w3id.org/cwl/cwl#stdin": "stdin", + "https://w3id.org/cwl/cwl#stdout": "stdout", + "http://www.w3.org/2001/XMLSchema#string": "string", + "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null": "the_only_non_null", + "https://w3id.org/cwl/cwl#v1.0": "v1.0", + "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", + "https://w3id.org/cwl/cwl#v1.1": "v1.1", + "https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", + "https://w3id.org/cwl/cwl#v1.2": "v1.2", + "https://w3id.org/cwl/cwl#v1.2.0-dev1": "v1.2.0-dev1", + "https://w3id.org/cwl/cwl#v1.2.0-dev2": "v1.2.0-dev2", + "https://w3id.org/cwl/cwl#v1.2.0-dev3": "v1.2.0-dev3", + "https://w3id.org/cwl/cwl#v1.2.0-dev4": "v1.2.0-dev4", + "https://w3id.org/cwl/cwl#v1.2.0-dev5": "v1.2.0-dev5", +} + +strtype = _PrimitiveLoader(str) +inttype = _PrimitiveLoader(int) +floattype = _PrimitiveLoader(float) +booltype = _PrimitiveLoader(bool) +None_type = _PrimitiveLoader(type(None)) +Any_type = _AnyLoader() +PrimitiveTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + ), + "PrimitiveType", +) +""" +Names of salad data types (based on Avro schema declarations). + +Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for +detailed information. 
+ +null: no value +boolean: a binary value +int: 32-bit signed integer +long: 64-bit signed integer +float: single precision (32-bit) IEEE 754 floating-point number +double: double precision (64-bit) IEEE 754 floating-point number +string: Unicode character sequence +""" +AnyLoader = _EnumLoader(("Any",), "Any") +""" +The **Any** type validates for any non-null value. +""" +RecordFieldLoader = _RecordLoader(RecordField) +RecordSchemaLoader = _RecordLoader(RecordSchema) +EnumSchemaLoader = _RecordLoader(EnumSchema) +ArraySchemaLoader = _RecordLoader(ArraySchema) +CWLVersionLoader = _EnumLoader( + ( + "draft-2", + "draft-3.dev1", + "draft-3.dev2", + "draft-3.dev3", + "draft-3.dev4", + "draft-3.dev5", + "draft-3", + "draft-4.dev1", + "draft-4.dev2", + "draft-4.dev3", + "v1.0.dev4", + "v1.0", + "v1.1.0-dev1", + "v1.1", + "v1.2.0-dev1", + "v1.2.0-dev2", + "v1.2.0-dev3", + "v1.2.0-dev4", + "v1.2.0-dev5", + "v1.2", + ), + "CWLVersion", +) +""" +Version symbols for published CWL document versions. +""" +CWLTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + "File", + "Directory", + ), + "CWLType", +) +""" +Extends primitive types with the concept of a file and directory as a builtin type. +File: A File object +Directory: A Directory object +""" +FileLoader = _RecordLoader(File) +DirectoryLoader = _RecordLoader(Directory) +LoadListingEnumLoader = _EnumLoader( + ( + "no_listing", + "shallow_listing", + "deep_listing", + ), + "LoadListingEnum", +) +""" +Specify the desired behavior for loading the `listing` field of +a Directory object for use by expressions. + +no_listing: Do not load the directory listing. +shallow_listing: Only load the top level listing, do not recurse into subdirectories. +deep_listing: Load the directory listing and recursively load all subdirectories as well. 
+""" +ExpressionLoader = _ExpressionLoader(str) +InputBindingLoader = _RecordLoader(InputBinding) +InputRecordFieldLoader = _RecordLoader(InputRecordField) +InputRecordSchemaLoader = _RecordLoader(InputRecordSchema) +InputEnumSchemaLoader = _RecordLoader(InputEnumSchema) +InputArraySchemaLoader = _RecordLoader(InputArraySchema) +OutputRecordFieldLoader = _RecordLoader(OutputRecordField) +OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema) +OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema) +OutputArraySchemaLoader = _RecordLoader(OutputArraySchema) +InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement) +SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement) +SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema) +LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement) +EnvironmentDefLoader = _RecordLoader(EnvironmentDef) +CommandLineBindingLoader = _RecordLoader(CommandLineBinding) +CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding) +CommandLineBindableLoader = _RecordLoader(CommandLineBindable) +CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField) +CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema) +CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema) +CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema) +CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField) +CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema) +CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema) +CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema) +CommandInputParameterLoader = _RecordLoader(CommandInputParameter) +CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) +stdinLoader = _EnumLoader(("stdin",), "stdin") +""" +Only valid as a `type` for a `CommandLineTool` input with no +`inputBinding` set. 
`stdin` must not be specified at the `CommandLineTool` +level. + +The following +``` +inputs: + an_input_name: + type: stdin +``` +is equivalent to +``` +inputs: + an_input_name: + type: File + streamable: true + +stdin: $(inputs.an_input_name.path) +``` +""" +stdoutLoader = _EnumLoader(("stdout",), "stdout") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stdout + +stdout: a_stdout_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + +stdout: a_stdout_file +``` + +If there is no `stdout` name provided, a random filename will be created. +For example, the following +``` +outputs: + an_output_name: + type: stdout +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + +stdout: random_stdout_filenameABCDEFG +``` + +If the `CommandLineTool` contains logically chained commands +(e.g. `echo a && echo b`) `stdout` must include the output of +every command. +""" +stderrLoader = _EnumLoader(("stderr",), "stderr") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stderr + +stderr: a_stderr_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + +stderr: a_stderr_file +``` + +If there is no `stderr` name provided, a random filename will be created. 
+For example, the following +``` +outputs: + an_output_name: + type: stderr +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + +stderr: random_stderr_filenameABCDEFG +``` +""" +CommandLineToolLoader = _RecordLoader(CommandLineTool) +DockerRequirementLoader = _RecordLoader(DockerRequirement) +SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) +SoftwarePackageLoader = _RecordLoader(SoftwarePackage) +DirentLoader = _RecordLoader(Dirent) +InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement) +EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement) +ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement) +ResourceRequirementLoader = _RecordLoader(ResourceRequirement) +WorkReuseLoader = _RecordLoader(WorkReuse) +NetworkAccessLoader = _RecordLoader(NetworkAccess) +InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement) +ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit) +ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter) +WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter) +ExpressionToolLoader = _RecordLoader(ExpressionTool) +LinkMergeMethodLoader = _EnumLoader( + ( + "merge_nested", + "merge_flattened", + ), + "LinkMergeMethod", +) +""" +The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +""" +PickValueMethodLoader = _EnumLoader( + ( + "first_non_null", + "the_only_non_null", + "all_non_null", + ), + "PickValueMethod", +) +""" +Picking non-null values among inbound data links, described in [WorkflowStepInput](#WorkflowStepInput). 
+""" +WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) +WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) +WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) +ScatterMethodLoader = _EnumLoader( + ( + "dotproduct", + "nested_crossproduct", + "flat_crossproduct", + ), + "ScatterMethod", +) +""" +The scatter method, as described in [workflow step scatter](#WorkflowStep). +""" +WorkflowStepLoader = _RecordLoader(WorkflowStep) +WorkflowLoader = _RecordLoader(Workflow) +SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) +ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement) +MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement) +StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement) +OperationInputParameterLoader = _RecordLoader(OperationInputParameter) +OperationOutputParameterLoader = _RecordLoader(OperationOutputParameter) +OperationLoader = _RecordLoader(Operation) +array_of_strtype = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ) +) +uri_strtype_True_False_None = _URILoader(strtype, True, False, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + ) +) +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype +) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + 
RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + 2, +) +array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_RecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" +) +Record_nameLoader = _EnumLoader(("record",), "Record_name") +typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2) +union_of_None_type_or_strtype = _UnionLoader( + ( + None_type, + strtype, + ) +) +uri_union_of_None_type_or_strtype_True_False_None = _URILoader( + union_of_None_type_or_strtype, True, False, None +) +uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) +Enum_nameLoader = _EnumLoader(("enum",), "Enum_name") +typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2) +uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + 
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + False, + True, + 2, +) +Array_nameLoader = _EnumLoader(("array",), "Array_name") +typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2) +File_classLoader = _EnumLoader(("File",), "File_class") +uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None) +uri_union_of_None_type_or_strtype_False_False_None = _URILoader( + union_of_None_type_or_strtype, False, False, None +) +union_of_None_type_or_inttype = _UnionLoader( + ( + None_type, + inttype, + ) +) +union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + FileLoader, + DirectoryLoader, + ) +) +array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader +) +union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader +) +Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) +union_of_None_type_or_booltype = _UnionLoader( + ( + None_type, + booltype, + ) +) +union_of_None_type_or_LoadListingEnumLoader = _UnionLoader( + ( + None_type, + LoadListingEnumLoader, + ) +) +array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( + ( + None_type, + SecondaryFileSchemaLoader, + array_of_SecondaryFileSchemaLoader, + ) +) 
# --- Auto-generated loader wiring (schema-salad codegen); do not edit by hand. ---
# Each module-level name composes smaller loaders; the generated name encodes the
# composition (e.g. "union_of_A_or_B" wraps a _UnionLoader over (A, B)).

# DSL loader for the `secondaryFiles` shorthand over an optional
# SecondaryFileSchema (single value or list).
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader(
    union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader
)
# Optional value: string, list of strings, or expression.
union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader(
    (
        None_type,
        strtype,
        array_of_strtype,
        ExpressionLoader,
    )
)
# URI-resolving wrapper over the union above.
# NOTE(review): the positional arguments (True, False, None) are flags whose
# semantics are defined by _URILoader elsewhere in the generated module —
# encoded in the "_True_False_None" name suffix; not verifiable from this chunk.
uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader(
    union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader,
    True,
    False,
    None,
)
# Optional value: string or expression.
union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader(
    (
        None_type,
        strtype,
        ExpressionLoader,
    )
)
uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = _URILoader(
    union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None
)
# Input `type` field: CWL type, input schema variants, or a plain string.
union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader(
    (
        CWLTypeLoader,
        InputRecordSchemaLoader,
        InputEnumSchemaLoader,
        InputArraySchemaLoader,
        strtype,
    )
)
array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader(
    union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype
)
# Same as above but also accepts a list of those alternatives.
union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader(
    (
        CWLTypeLoader,
        InputRecordSchemaLoader,
        InputEnumSchemaLoader,
        InputArraySchemaLoader,
        strtype,
        array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype,
    )
)
# --- Auto-generated loader wiring (schema-salad codegen); do not edit by hand. ---

# Type-DSL wrapper over the input-type union (handles shorthand like "string[]"
# and "int?" per the salad type DSL).
# NOTE(review): the trailing 2 is an integer parameter of _TypeDSLLoader
# (encoded in the "_2" name suffix); its meaning is defined elsewhere in the
# generated module — not verifiable from this chunk.
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(
    union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype,
    2,
)
array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader)
# Optional list of input record fields.
union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader(
    (
        None_type,
        array_of_InputRecordFieldLoader,
    )
)
# Id-map form: allows `fields` to be given as a map keyed by "name" with
# "type" as the map predicate.
idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader(
    union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type"
)
# URI-resolving wrapper over the input-type union (flags encoded in the
# "_False_True_2" suffix; semantics defined by _URILoader elsewhere).
uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2 = _URILoader(
    union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype,
    False,
    True,
    2,
)
# Output `type` field: CWL type, output schema variants, or a plain string.
union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader(
    (
        CWLTypeLoader,
        OutputRecordSchemaLoader,
        OutputEnumSchemaLoader,
        OutputArraySchemaLoader,
        strtype,
    )
)
array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader(
    union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype
)
# --- Auto-generated loader wiring (schema-salad codegen); do not edit by hand. ---

# Output `type` field: the single-value alternatives plus a list of them.
union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader(
    (
        CWLTypeLoader,
        OutputRecordSchemaLoader,
        OutputEnumSchemaLoader,
        OutputArraySchemaLoader,
        strtype,
        array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype,
    )
)
# Type-DSL wrapper over the output-type union.
# NOTE(review): trailing 2 is a _TypeDSLLoader parameter (see "_2" suffix);
# meaning defined elsewhere in the generated module.
typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(
    union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype,
    2,
)
array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader)
# Optional list of output record fields.
union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader(
    (
        None_type,
        array_of_OutputRecordFieldLoader,
    )
)
# Id-map form: `fields` may be a map keyed by "name" with "type" as predicate.
idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader(
    union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type"
)
# URI-resolving wrapper over the output-type union (flags per "_False_True_2").
uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader(
    union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype,
    False,
    True,
    2,
)
# --- Auto-generated loader wiring (schema-salad codegen); do not edit by hand. ---

# Optional default value: File, Directory, or any value.
union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader(
    (
        None_type,
        FileLoader,
        DirectoryLoader,
        Any_type,
    )
)
# Any process input parameter variant (CommandLineTool / Workflow / Operation).
union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _UnionLoader(
    (
        CommandInputParameterLoader,
        WorkflowInputParameterLoader,
        OperationInputParameterLoader,
    )
)
array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _ArrayLoader(
    union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader
)
# Id-map form: `inputs` may be a map keyed by "id" with "type" as predicate.
idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _IdMapLoader(
    array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader,
    "id",
    "type",
)
# Any process output parameter variant.
union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _UnionLoader(
    (
        CommandOutputParameterLoader,
        ExpressionToolOutputParameterLoader,
        WorkflowOutputParameterLoader,
        OperationOutputParameterLoader,
    )
)
array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _ArrayLoader(
    union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader
)
# Id-map form: `outputs` may be a map keyed by "id" with "type" as predicate.
idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _IdMapLoader(
    array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader,
    "id",
    "type",
)
# --- Auto-generated loader wiring (schema-salad codegen); do not edit by hand. ---

# Any concrete ProcessRequirement variant (all 17 requirement record loaders).
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader(
    (
        InlineJavascriptRequirementLoader,
        SchemaDefRequirementLoader,
        LoadListingRequirementLoader,
        DockerRequirementLoader,
        SoftwareRequirementLoader,
        InitialWorkDirRequirementLoader,
        EnvVarRequirementLoader,
        ShellCommandRequirementLoader,
        ResourceRequirementLoader,
        WorkReuseLoader,
        NetworkAccessLoader,
        InplaceUpdateRequirementLoader,
        ToolTimeLimitLoader,
        SubworkflowFeatureRequirementLoader,
        ScatterFeatureRequirementLoader,
        MultipleInputFeatureRequirementLoader,
        StepInputExpressionRequirementLoader,
    )
)
# List of requirement variants.
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader(
    union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader
)
# Optional list of requirement variants (e.g. the `requirements` field).
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader(
    (
        None_type,
        array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader,
    )
)
+idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + "class", + "None", +) +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + LoadListingRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + 
ShellCommandRequirementLoader, + ResourceRequirementLoader, + WorkReuseLoader, + NetworkAccessLoader, + InplaceUpdateRequirementLoader, + ToolTimeLimitLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + Any_type, + ) +) +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type +) 
+union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + None_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + ) +) +idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( + 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + "class", + "None", +) +union_of_None_type_or_CWLVersionLoader = _UnionLoader( + ( + None_type, + CWLVersionLoader, + ) +) +uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader( + union_of_None_type_or_CWLVersionLoader, False, True, None +) +union_of_None_type_or_array_of_strtype = _UnionLoader( + ( + None_type, + array_of_strtype, + ) +) +uri_union_of_None_type_or_array_of_strtype_True_False_None = _URILoader( + union_of_None_type_or_array_of_strtype, True, False, None +) +InlineJavascriptRequirement_classLoader = _EnumLoader( + ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" +) +uri_InlineJavascriptRequirement_classLoader_False_True_None = _URILoader( + InlineJavascriptRequirement_classLoader, False, True, None +) +SchemaDefRequirement_classLoader = _EnumLoader( + ("SchemaDefRequirement",), "SchemaDefRequirement_class" +) +uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( + SchemaDefRequirement_classLoader, False, True, None +) +union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( + ( + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + ) +) +array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( + 
union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader +) +union_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + ) +) +union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + booltype, + ExpressionLoader, + ) +) +LoadListingRequirement_classLoader = _EnumLoader( + ("LoadListingRequirement",), "LoadListingRequirement_class" +) +uri_LoadListingRequirement_classLoader_False_True_None = _URILoader( + LoadListingRequirement_classLoader, False, True, None +) +union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_strtype, + ) +) +union_of_None_type_or_ExpressionLoader = _UnionLoader( + ( + None_type, + ExpressionLoader, + ) +) +union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + CommandLineBindingLoader, + ) +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + 
CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) +union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandInputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) +) +uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + False, + True, + 2, +) 
+union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( + ( + None_type, + CommandOutputBindingLoader, + ) +) +array_of_CommandOutputRecordFieldLoader = 
_ArrayLoader(CommandOutputRecordFieldLoader) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandOutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) +) +uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + False, + True, + 2, +) +union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdinLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + 
union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdoutLoader, + stderrLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") +uri_CommandLineTool_classLoader_False_True_None = _URILoader( + CommandLineTool_classLoader, False, True, None +) +array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) +idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( + 
array_of_CommandInputParameterLoader, "id", "type" +) +array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) +idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( + array_of_CommandOutputParameterLoader, "id", "type" +) +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + CommandLineBindingLoader, + ) +) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) +) +array_of_inttype = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype = _UnionLoader( + ( + None_type, + array_of_inttype, + ) +) +DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) +uri_DockerRequirement_classLoader_False_True_None = _URILoader( + DockerRequirement_classLoader, False, True, None +) +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) +uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( + SoftwareRequirement_classLoader, False, True, None +) +array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( + array_of_SoftwarePackageLoader, "package", "specs" +) +uri_union_of_None_type_or_array_of_strtype_False_False_None = _URILoader( + union_of_None_type_or_array_of_strtype, False, False, None +) +InitialWorkDirRequirement_classLoader = _EnumLoader( + ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" +) +uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( + InitialWorkDirRequirement_classLoader, False, True, None +) 
+union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + DirentLoader, + ExpressionLoader, + FileLoader, + DirectoryLoader, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader +) +union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + ExpressionLoader, + array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) +uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( + EnvVarRequirement_classLoader, False, True, None +) +array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( + array_of_EnvironmentDefLoader, "envName", "envValue" +) +ShellCommandRequirement_classLoader = _EnumLoader( + ("ShellCommandRequirement",), "ShellCommandRequirement_class" +) +uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( + ShellCommandRequirement_classLoader, False, True, None +) +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) +uri_ResourceRequirement_classLoader_False_True_None = _URILoader( + ResourceRequirement_classLoader, False, True, None +) +union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + floattype, + 
ExpressionLoader, + ) +) +WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") +uri_WorkReuse_classLoader_False_True_None = _URILoader( + WorkReuse_classLoader, False, True, None +) +union_of_booltype_or_ExpressionLoader = _UnionLoader( + ( + booltype, + ExpressionLoader, + ) +) +NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") +uri_NetworkAccess_classLoader_False_True_None = _URILoader( + NetworkAccess_classLoader, False, True, None +) +InplaceUpdateRequirement_classLoader = _EnumLoader( + ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" +) +uri_InplaceUpdateRequirement_classLoader_False_True_None = _URILoader( + InplaceUpdateRequirement_classLoader, False, True, None +) +ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") +uri_ToolTimeLimit_classLoader_False_True_None = _URILoader( + ToolTimeLimit_classLoader, False, True, None +) +union_of_inttype_or_ExpressionLoader = _UnionLoader( + ( + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_InputBindingLoader = _UnionLoader( + ( + None_type, + InputBindingLoader, + ) +) +ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") +uri_ExpressionTool_classLoader_False_True_None = _URILoader( + ExpressionTool_classLoader, False, True, None +) +array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) +idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( + array_of_WorkflowInputParameterLoader, "id", "type" +) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) +idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( + array_of_ExpressionToolOutputParameterLoader, "id", "type" +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1 +) +union_of_None_type_or_LinkMergeMethodLoader = 
_UnionLoader( + ( + None_type, + LinkMergeMethodLoader, + ) +) +union_of_None_type_or_PickValueMethodLoader = _UnionLoader( + ( + None_type, + PickValueMethodLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2 +) +array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( + array_of_WorkflowStepInputLoader, "id", "source" +) +union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + ( + strtype, + WorkflowStepOutputLoader, + ) +) +array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( + union_of_strtype_or_WorkflowStepOutputLoader +) +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) +) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) +) +array_of_Any_type = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type = _UnionLoader( + ( + None_type, + array_of_Any_type, + ) +) +idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_Any_type, "class", "None" +) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + ) +) +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + False, + False, + None, +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader( + 
union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0 +) +union_of_None_type_or_ScatterMethodLoader = _UnionLoader( + ( + None_type, + ScatterMethodLoader, + ) +) +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader( + union_of_None_type_or_ScatterMethodLoader, False, True, None +) +Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, True, None +) +array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) +idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( + array_of_WorkflowOutputParameterLoader, "id", "type" +) +array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) +idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( + union_of_array_of_WorkflowStepLoader, "id", "None" +) +SubworkflowFeatureRequirement_classLoader = _EnumLoader( + ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" +) +uri_SubworkflowFeatureRequirement_classLoader_False_True_None = _URILoader( + SubworkflowFeatureRequirement_classLoader, False, True, None +) +ScatterFeatureRequirement_classLoader = _EnumLoader( + ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" +) +uri_ScatterFeatureRequirement_classLoader_False_True_None = _URILoader( + ScatterFeatureRequirement_classLoader, False, True, None +) +MultipleInputFeatureRequirement_classLoader = _EnumLoader( + ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" +) +uri_MultipleInputFeatureRequirement_classLoader_False_True_None = _URILoader( + MultipleInputFeatureRequirement_classLoader, False, True, None +) +StepInputExpressionRequirement_classLoader = _EnumLoader( + ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" +) 
+uri_StepInputExpressionRequirement_classLoader_False_True_None = _URILoader( + StepInputExpressionRequirement_classLoader, False, True, None +) +Operation_classLoader = _EnumLoader(("Operation",), "Operation_class") +uri_Operation_classLoader_False_True_None = _URILoader( + Operation_classLoader, False, True, None +) +array_of_OperationInputParameterLoader = _ArrayLoader(OperationInputParameterLoader) +idmap_inputs_array_of_OperationInputParameterLoader = _IdMapLoader( + array_of_OperationInputParameterLoader, "id", "type" +) +array_of_OperationOutputParameterLoader = _ArrayLoader(OperationOutputParameterLoader) +idmap_outputs_array_of_OperationOutputParameterLoader = _IdMapLoader( + array_of_OperationOutputParameterLoader, "id", "type" +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + ) +) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + ) +) + + +def load_document( + doc: Any, + baseuri: Optional[str] = None, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + if baseuri is None: + baseuri = file_uri(os.getcwd()) + "/" + if loadingOptions is None: + loadingOptions = LoadingOptions() + result, metadata = _document_load( + 
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + doc, + baseuri, + loadingOptions, + ) + return result + + +def load_document_with_metadata( + doc: Any, + baseuri: Optional[str] = None, + loadingOptions: Optional[LoadingOptions] = None, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Any: + if baseuri is None: + baseuri = file_uri(os.getcwd()) + "/" + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=baseuri) + return _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + doc, + baseuri, + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + +def load_document_by_string( + string: Any, + uri: str, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + yaml = yaml_no_ts() + result = yaml.load(string) + add_lc_filename(result, uri) + + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=uri) + + result, metadata = _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + result, + uri, + loadingOptions, + ) + return result + + +def load_document_by_yaml( + yaml: Any, + uri: str, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + """ + Shortcut to load via a YAML object. 
+ yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True + """ + add_lc_filename(yaml, uri) + + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=uri) + + result, metadata = _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + yaml, + uri, + loadingOptions, + ) + return result diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 7b2261ef6..04b472525 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -1137,7 +1137,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.doc == other.doc and self.name == other.name and self.type == other.type + self.doc == other.doc + and self.name == other.name + and self.type == other.type ) return False @@ -1221,12 +1223,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1251,7 +1257,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1282,11 +1288,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in 
inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1294,12 +1301,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1314,7 +1323,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1463,12 +1472,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format(k), + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1491,7 +1504,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1522,11 +1535,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1534,12 +1548,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned 
value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1554,7 +1570,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1723,7 +1739,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -1755,7 +1773,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1786,11 +1804,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1798,12 +1817,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1818,7 +1839,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1836,7 +1857,9 @@ def save( shift=shift, ) if self.symbols is 
not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1958,12 +1981,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format(k), + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1986,7 +2013,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2017,11 +2044,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -2029,12 +2057,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2049,7 +2079,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, 
relative_uris) @@ -2392,7 +2422,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2431,7 +2463,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2462,11 +2494,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -2474,12 +2507,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2494,7 +2529,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) @@ -2825,7 +2860,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2855,7 +2892,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = 
None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2886,11 +2923,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -2898,12 +2936,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2918,10 +2958,12 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.specializeFrom is not None and "specializeFrom" not in r: - u = save_relative_uri(self.specializeFrom, base_url, False, 1, relative_uris) + u = save_relative_uri( + self.specializeFrom, base_url, False, 1, relative_uris + ) r["specializeFrom"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3021,7 +3063,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type, self.jsonldPredicate, self.default)) + return hash( + (self.doc, self.name, self.type, self.jsonldPredicate, self.default) + ) @classmethod def fromDoc( @@ -3136,7 +3180,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3170,7 +3216,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, 
inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3201,11 +3247,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3213,12 +3260,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3233,7 +3282,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3683,7 +3732,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3725,7 +3776,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3756,11 +3807,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 
+ shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3768,12 +3820,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3788,7 +3842,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3890,7 +3944,9 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris + ) r["docParent"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3905,7 +3961,9 @@ def save( shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3920,7 +3978,9 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4356,7 +4416,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: 
_errors__.append( @@ -4396,7 +4458,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4427,11 +4489,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4439,12 +4502,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4459,7 +4524,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4498,7 +4563,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4555,7 +4622,9 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris + ) r["docParent"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4570,7 +4639,9 @@ def save( shift=shift, ) if self.docChild is 
not None and "docChild" not in r: - u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4585,7 +4656,9 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4893,7 +4966,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4929,7 +5004,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4960,11 +5035,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4972,12 +5048,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4992,7 +5070,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5052,7 +5130,9 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris + ) r["docParent"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5067,7 +5147,9 @@ def save( shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5082,7 +5164,9 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5126,7 +5210,9 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"]) + attrs = frozenset( + ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"] + ) _vocab = { @@ -5358,15 +5444,17 @@ def save( ) Documentation_nameLoader = _EnumLoader(("documentation",), "Documentation_name") typedsl_Documentation_nameLoader_2 = _TypeDSLLoader(Documentation_nameLoader, 2) -union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader( - ( - SaladRecordSchemaLoader, - SaladEnumSchemaLoader, - DocumentationLoader, +union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = ( + _UnionLoader( + ( + 
SaladRecordSchemaLoader, + SaladEnumSchemaLoader, + DocumentationLoader, + ) ) ) -array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = ( - _ArrayLoader(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader) +array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _ArrayLoader( + union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader ) union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader( ( diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 1becd9122..14cd078fa 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -384,9 +384,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -430,9 +431,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, diff --git a/schema_salad/tests/cwl_v1_0.py b/schema_salad/tests/cwl_v1_0.py index a04565ca0..60898f1e6 100644 --- a/schema_salad/tests/cwl_v1_0.py +++ b/schema_salad/tests/cwl_v1_0.py @@ -1133,7 +1133,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.name == other.name and self.doc == other.doc and self.type == other.type + self.name == other.name + and self.doc == other.doc + and self.type == other.type ) return False @@ -1217,12 +1219,16 @@ def fromDoc( 
for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`".format(k), + "invalid field `{}`, expected one of: `name`, `doc`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1247,7 +1253,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1278,11 +1284,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1290,12 +1297,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1310,7 +1319,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1459,12 +1468,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: 
_errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format(k), + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1487,7 +1500,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1518,11 +1531,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1530,12 +1544,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1550,7 +1566,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1686,12 +1702,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format(k), + "invalid field `{}`, expected one of: `symbols`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1714,7 +1734,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, 
inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1745,11 +1765,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1757,12 +1778,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1777,7 +1800,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1902,12 +1925,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format(k), + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1930,7 +1957,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1961,11 +1988,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if 
key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1973,12 +2001,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1993,7 +2023,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -2413,7 +2443,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2452,7 +2484,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2485,11 +2517,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -2497,12 +2530,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if 
type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2517,7 +2552,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2843,7 +2878,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.class_, self.location, self.path, self.basename, self.listing)) + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) @classmethod def fromDoc( @@ -2938,7 +2975,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2970,7 +3009,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3003,11 +3042,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3015,12 +3055,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3035,7 +3077,7 
@@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3303,7 +3345,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3337,7 +3381,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3368,11 +3412,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3380,12 +3425,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3400,7 +3447,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3645,7 +3692,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, 
scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3678,7 +3727,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3709,11 +3758,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3721,12 +3771,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3741,7 +3793,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3983,7 +4035,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4017,7 +4071,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4048,11 +4102,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + 
if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4060,12 +4115,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4080,7 +4137,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4098,7 +4155,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4307,7 +4366,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4339,7 +4400,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4370,11 +4431,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if 
inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4382,12 +4444,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4402,7 +4466,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -4632,7 +4696,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4665,7 +4731,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4696,11 +4762,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4708,12 +4775,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4728,7 +4797,7 
@@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4922,7 +4991,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4953,7 +5024,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4984,11 +5055,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4996,12 +5068,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5016,7 +5090,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -5213,7 +5287,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = 
_doc[k] else: _errors__.append( @@ -5245,7 +5321,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -5276,11 +5352,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -5288,12 +5365,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5308,7 +5387,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -5520,7 +5599,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5552,7 +5633,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -5583,11 +5664,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift 
- while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -5595,12 +5677,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5615,7 +5699,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -5965,7 +6049,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6003,7 +6089,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6019,7 +6105,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -6049,11 +6135,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6061,12 +6148,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6081,7 +6170,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6492,7 +6581,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6528,7 +6619,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6544,7 +6635,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -6574,11 +6665,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6586,12 +6678,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = 
saved_val[0] r[key] = saved_val @@ -6606,7 +6700,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6819,7 +6913,10 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InlineJavascriptRequirement): - return bool(self.class_ == other.class_ and self.expressionLib == other.expressionLib) + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) return False def __hash__(self) -> int: @@ -6864,7 +6961,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6878,7 +6977,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'InlineJavascriptRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) _constructed = cls( expressionLib=expressionLib, extension_fields=extension_fields, @@ -6893,7 +6994,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6926,11 +7027,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6938,12 +7040,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - 
shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6958,7 +7062,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -7067,12 +7171,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format(k), + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -7094,7 +7202,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7127,11 +7235,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7139,12 +7248,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7159,7 +7270,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.types is not None and "types" not in r: r["types"] = save( @@ -7223,7 +7334,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, EnvironmentDef): - return bool(self.envName == other.envName and self.envValue == other.envValue) + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) return False def __hash__(self) -> int: @@ -7276,12 +7389,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format(k), + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -7304,7 +7421,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7335,11 +7452,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7347,12 +7465,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7367,7 +7487,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7661,7 +7781,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -7696,7 +7818,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7727,11 +7849,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7739,12 +7862,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7759,7 +7884,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8049,7 +8174,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ 
-8080,7 +8207,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8111,11 +8238,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -8123,12 +8251,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8143,7 +8273,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.glob is not None and "glob" not in r: r["glob"] = save( @@ -8373,7 +8503,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8387,7 +8519,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputRecordField'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputRecordField'", None, _errors__ + ) _constructed = cls( name=name, doc=doc, @@ -8407,7 +8541,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8438,11 +8572,12 @@ 
def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -8450,12 +8585,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8470,7 +8607,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -8715,7 +8852,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8729,7 +8868,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputRecordSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, _errors__ + ) _constructed = cls( fields=fields, type=type, @@ -8748,7 +8889,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8779,11 +8920,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in 
inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -8791,12 +8933,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8811,7 +8955,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9053,7 +9197,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9067,7 +9213,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputEnumSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ + ) _constructed = cls( symbols=symbols, type=type, @@ -9087,7 +9235,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9118,11 +9266,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -9130,12 
+9279,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9150,7 +9301,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9168,7 +9319,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9377,7 +9530,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9391,7 +9546,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputArraySchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputArraySchema'", None, _errors__ + ) _constructed = cls( items=items, type=type, @@ -9409,7 +9566,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9440,11 +9597,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if 
inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -9452,12 +9610,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9472,7 +9632,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -9702,7 +9862,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9716,7 +9878,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputRecordField'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) _constructed = cls( name=name, doc=doc, @@ -9735,7 +9899,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9766,11 +9930,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -9778,12 +9943,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9798,7 +9965,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10022,7 +10189,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10036,7 +10205,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputRecordSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputRecordSchema'", None, _errors__ + ) _constructed = cls( fields=fields, type=type, @@ -10055,7 +10226,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10086,11 +10257,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -10098,12 +10270,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if 
len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10118,7 +10292,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10330,7 +10504,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10344,7 +10520,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputEnumSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputEnumSchema'", None, _errors__ + ) _constructed = cls( symbols=symbols, type=type, @@ -10362,7 +10540,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10393,11 +10571,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -10405,12 +10584,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10425,7 +10606,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -10637,7 +10818,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10651,7 +10834,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputArraySchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) _constructed = cls( items=items, type=type, @@ -10669,7 +10854,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10700,11 +10885,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -10712,12 +10898,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10732,7 +10920,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -11086,7 +11274,9 @@ def 
fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11124,7 +11314,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11140,7 +11330,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -11170,11 +11360,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -11182,12 +11373,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11202,7 +11395,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -11639,7 +11832,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] 
= _doc[k] else: _errors__.append( @@ -11653,7 +11848,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -11676,7 +11873,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11692,7 +11889,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -11722,11 +11919,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -11734,12 +11932,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11754,7 +11954,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12347,7 +12547,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12392,7 +12594,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -12408,7 +12610,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -12440,11 +12642,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -12452,12 +12655,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -12472,7 +12677,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12616,7 +12821,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13051,7 +13258,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", 
loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13085,7 +13294,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13118,11 +13327,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -13130,12 +13340,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13150,7 +13362,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -13369,12 +13581,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format(k), + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -13396,7 +13612,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, 
inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13429,11 +13645,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -13441,12 +13658,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13461,7 +13680,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -13598,7 +13817,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13629,7 +13850,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13660,11 +13881,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = 
save( getattr(self, key), top=False, @@ -13672,12 +13894,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13692,7 +13916,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.package is not None and "package" not in r: r["package"] = save( @@ -13873,7 +14097,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13904,7 +14130,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13935,11 +14161,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -13947,12 +14174,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13967,7 +14196,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -14111,19 +14340,25 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format(k), + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException("Trying 'InitialWorkDirRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) _constructed = cls( listing=listing, extension_fields=extension_fields, @@ -14138,7 +14373,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14171,11 +14406,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14183,12 +14419,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14203,7 +14441,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -14307,12 +14545,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format(k), + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -14334,7 +14576,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14367,11 +14609,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14379,12 +14622,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14399,7 +14644,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -14491,7 +14736,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, 
vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14503,7 +14750,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ShellCommandRequirement'", None, _errors__) + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -14517,7 +14766,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14550,11 +14799,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14562,12 +14812,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14582,7 +14834,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -14848,7 +15100,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14884,7 +15138,7 @@ def save( relative_uris: bool = True, 
keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14917,11 +15171,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14929,12 +15184,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14949,7 +15206,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -15373,7 +15630,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15387,7 +15646,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ExpressionToolOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'ExpressionToolOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -15410,7 +15671,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15426,7 +15687,7 @@ 
def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -15456,11 +15717,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15468,12 +15730,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15488,7 +15752,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -15924,7 +16188,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15962,7 +16228,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15978,7 +16244,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -16010,11 +16276,12 @@ def save( if isinstance(key, 
str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16022,12 +16289,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16042,7 +16311,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16186,7 +16455,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16527,7 +16798,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16541,7 +16814,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -16566,7 +16841,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, 
inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16582,7 +16857,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -16612,11 +16887,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16624,12 +16900,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16644,7 +16922,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16782,7 +17060,9 @@ def save( shift=shift, ) if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri(self.outputSource, str(self.id), False, 1, relative_uris) + u = save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) r["outputSource"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16948,7 +17228,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.source, self.linkMerge, self.id, self.default, self.valueFrom)) + return hash( + (self.source, self.linkMerge, self.id, self.default, self.valueFrom) + ) @classmethod def 
fromDoc( @@ -17066,7 +17348,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17100,7 +17384,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17116,7 +17400,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17146,11 +17430,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17158,12 +17443,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17178,7 +17465,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17363,7 +17650,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17391,7 +17680,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17407,7 +17696,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17437,11 +17726,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17449,12 +17739,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17469,7 +17761,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17822,7 +18114,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17861,7 +18155,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + 
shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17877,7 +18171,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17907,11 +18201,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17919,12 +18214,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17939,7 +18236,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18107,7 +18404,9 @@ def save( shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri(self.scatterMethod, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) r["scatterMethod"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18445,7 +18744,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18483,7 +18784,7 @@ def save( relative_uris: 
bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -18499,7 +18800,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18531,11 +18832,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -18543,12 +18845,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -18563,7 +18867,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18707,7 +19011,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18819,7 +19125,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) 
extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18831,7 +19139,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'SubworkflowFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'SubworkflowFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -18845,7 +19155,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -18878,11 +19188,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -18890,12 +19201,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -18910,7 +19223,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -18976,7 +19289,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18988,7 +19303,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ScatterFeatureRequirement'", None, _errors__) + raise 
ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19002,7 +19319,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -19035,11 +19352,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -19047,12 +19365,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19067,7 +19387,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -19133,7 +19453,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19145,7 +19467,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'MultipleInputFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19159,7 +19483,7 @@ def save( 
relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -19192,11 +19516,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -19204,12 +19529,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19224,7 +19551,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -19290,7 +19617,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19302,7 +19631,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'StepInputExpressionRequirement'", None, _errors__) + raise ValidationException( + "Trying 'StepInputExpressionRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19316,7 +19647,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ 
-19349,11 +19680,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -19361,12 +19693,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19381,7 +19715,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -19910,14 +20244,18 @@ def save( union_of_None_type_or_strtype, True, False, None ) Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader(Directory_classLoader, False, True, None) +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) union_of_strtype_or_ExpressionLoader = _UnionLoader( ( strtype, ExpressionLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader(union_of_strtype_or_ExpressionLoader) +array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader( + union_of_strtype_or_ExpressionLoader +) union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader = _UnionLoader( ( None_type, @@ -20031,13 +20369,11 @@ def save( ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = ( - _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, - ) 
+uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, ) union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( ( @@ -20168,17 +20504,17 @@ def save( uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( SchemaDefRequirement_classLoader, False, True, None ) -union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _UnionLoader( - ( - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, +union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( + _UnionLoader( + ( + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + ) ) ) -array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( - _ArrayLoader( - union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader - ) +array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _ArrayLoader( + union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader ) union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( ( @@ -20221,8 +20557,10 @@ def save( array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) ) union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -20257,8 +20595,10 @@ def save( array_of_CommandOutputRecordFieldLoader, ) ) 
-idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) ) union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -20311,15 +20651,13 @@ def save( CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader( - union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - ) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, ) ) array_of_inttype = _ArrayLoader(inttype) @@ -20329,11 +20667,15 @@ def save( array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader(("DockerRequirement",), "DockerRequirement_class") +DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) uri_DockerRequirement_classLoader_False_True_None = _URILoader( DockerRequirement_classLoader, False, True, None ) -SoftwareRequirement_classLoader = _EnumLoader(("SoftwareRequirement",), "SoftwareRequirement_class") 
+SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( SoftwareRequirement_classLoader, False, True, None ) @@ -20350,21 +20692,17 @@ def save( uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( InitialWorkDirRequirement_classLoader, False, True, None ) -union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = ( - _UnionLoader( - ( - FileLoader, - DirectoryLoader, - DirentLoader, - strtype, - ExpressionLoader, - ) +union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + FileLoader, + DirectoryLoader, + DirentLoader, + strtype, + ExpressionLoader, ) ) -array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = ( - _ArrayLoader( - union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader - ) +array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader ) union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader = _UnionLoader( ( @@ -20373,7 +20711,9 @@ def save( ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader(("EnvVarRequirement",), "EnvVarRequirement_class") +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( EnvVarRequirement_classLoader, False, True, None ) @@ -20387,7 +20727,9 @@ def save( uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( ShellCommandRequirement_classLoader, False, True, None ) -ResourceRequirement_classLoader = _EnumLoader(("ResourceRequirement",), "ResourceRequirement_class") 
+ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) uri_ResourceRequirement_classLoader_False_True_None = _URILoader( ResourceRequirement_classLoader, False, True, None ) @@ -20418,7 +20760,9 @@ def save( uri_ExpressionTool_classLoader_False_True_None = _URILoader( ExpressionTool_classLoader, False, True, None ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) @@ -20450,11 +20794,13 @@ def save( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) ) array_of_Any_type = _ArrayLoader(Any_type) union_of_None_type_or_array_of_Any_type = _UnionLoader( @@ -20466,12 +20812,14 @@ def save( idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) ) ) 
uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( @@ -20493,7 +20841,9 @@ def save( union_of_None_type_or_ScatterMethodLoader, False, True, None ) Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader(Workflow_classLoader, False, True, None) +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, True, None +) array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" @@ -20534,8 +20884,10 @@ def save( WorkflowLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader + ) ) union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_1.py b/schema_salad/tests/cwl_v1_1.py index 1488c286a..bdceca981 100644 --- a/schema_salad/tests/cwl_v1_1.py +++ b/schema_salad/tests/cwl_v1_1.py @@ -1137,7 +1137,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.doc == other.doc and self.name == other.name and self.type == other.type + self.doc == other.doc + and self.name == other.name + and self.type == other.type ) return False @@ -1221,12 +1223,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1251,7 +1257,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1282,11 +1288,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1294,12 +1301,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1314,7 +1323,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1463,12 +1472,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format(k), + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), 
SourceLine(_doc, k, str), ) ) @@ -1491,7 +1504,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1522,11 +1535,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1534,12 +1548,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1554,7 +1570,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1690,12 +1706,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format(k), + "invalid field `{}`, expected one of: `symbols`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1718,7 +1738,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1749,11 +1769,12 @@ def save( if 
isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1761,12 +1782,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1781,7 +1804,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1906,12 +1929,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format(k), + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1934,7 +1961,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1965,11 +1992,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in 
inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1977,12 +2005,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1997,7 +2027,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -2417,7 +2447,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2456,7 +2488,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2489,11 +2521,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -2501,12 +2534,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2521,7 +2556,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2847,7 +2882,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.class_, self.location, self.path, self.basename, self.listing)) + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) @classmethod def fromDoc( @@ -2942,7 +2979,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2974,7 +3013,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3007,11 +3046,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3019,12 +3059,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3039,7 +3081,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.location is not None and "location" not in r: u = 
save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3219,12 +3261,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`".format(k), + "invalid field `{}`, expected one of: `loadContents`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -3246,7 +3292,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3277,11 +3323,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3289,12 +3336,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3309,7 +3358,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3605,7 +3654,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, 
scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3643,7 +3694,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3674,11 +3725,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3686,12 +3738,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3706,7 +3760,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3829,7 +3883,9 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4062,7 +4118,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4096,7 +4154,7 @@ def save( 
relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4127,11 +4185,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4139,12 +4198,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4159,7 +4220,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4422,7 +4483,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4456,7 +4519,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4487,11 +4550,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if 
inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4499,12 +4563,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4519,7 +4585,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4537,7 +4603,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4776,7 +4844,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4810,7 +4880,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4841,11 +4911,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), 
top=False, @@ -4853,12 +4924,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4873,7 +4946,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5191,7 +5264,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5227,7 +5302,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -5258,11 +5333,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -5270,12 +5346,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5290,7 +5368,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5413,7 +5491,9 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5436,7 +5516,9 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"]) + attrs = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] + ) class OutputRecordSchema(RecordSchema, OutputSchema): @@ -5592,7 +5674,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5626,7 +5710,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -5657,11 +5741,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -5669,12 +5754,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in 
the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5689,7 +5776,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5952,7 +6039,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5986,7 +6075,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6017,11 +6106,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6029,12 +6119,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6049,7 +6141,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6067,7 +6159,9 @@ def save( shift=shift, ) if self.symbols is not 
None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6306,7 +6400,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6340,7 +6436,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6371,11 +6467,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6383,12 +6480,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6403,7 +6502,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6578,7 +6677,10 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InlineJavascriptRequirement): - return bool(self.class_ == other.class_ and self.expressionLib == 
other.expressionLib) + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) return False def __hash__(self) -> int: @@ -6623,7 +6725,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6637,7 +6741,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'InlineJavascriptRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) _constructed = cls( expressionLib=expressionLib, extension_fields=extension_fields, @@ -6652,7 +6758,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6685,11 +6791,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6697,12 +6804,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6717,7 +6826,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6830,12 
+6939,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format(k), + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -6857,7 +6970,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6890,11 +7003,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6902,12 +7016,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6922,7 +7038,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.types is not None and "types" not in r: r["types"] = save( @@ -6979,7 +7095,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, SecondaryFileSchema): - return bool(self.pattern == other.pattern and self.required == other.required) + return bool( + self.pattern == other.pattern and self.required == other.required + ) return False def __hash__(self) -> int: 
@@ -7035,12 +7153,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `pattern`, `required`".format(k), + "invalid field `{}`, expected one of: `pattern`, `required`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -7063,7 +7185,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7094,11 +7216,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7106,12 +7229,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7126,7 +7251,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.pattern is not None and "pattern" not in r: r["pattern"] = save( @@ -7209,7 +7334,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, LoadListingRequirement): - return bool(self.class_ == other.class_ and self.loadListing == other.loadListing) + return bool( + self.class_ == other.class_ and self.loadListing == other.loadListing + ) 
return False def __hash__(self) -> int: @@ -7254,19 +7381,25 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `loadListing`".format(k), + "invalid field `{}`, expected one of: `class`, `loadListing`".format( + k + ), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException("Trying 'LoadListingRequirement'", None, _errors__) + raise ValidationException( + "Trying 'LoadListingRequirement'", None, _errors__ + ) _constructed = cls( loadListing=loadListing, extension_fields=extension_fields, @@ -7281,7 +7414,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7314,11 +7447,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7326,12 +7460,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7346,7 +7482,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadListing is not None and "loadListing" not in r: 
r["loadListing"] = save( @@ -7410,7 +7546,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, EnvironmentDef): - return bool(self.envName == other.envName and self.envValue == other.envValue) + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) return False def __hash__(self) -> int: @@ -7463,12 +7601,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format(k), + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -7491,7 +7633,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7522,11 +7664,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7534,12 +7677,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7554,7 +7699,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.envName is not 
None and "envName" not in r: r["envName"] = save( @@ -7848,7 +7993,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -7883,7 +8030,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7914,11 +8061,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7926,12 +8074,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7946,7 +8096,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8257,7 +8407,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8289,7 +8441,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, 
int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8320,11 +8472,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -8332,12 +8485,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8352,7 +8507,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8511,12 +8666,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `inputBinding`".format(k), + "invalid field `{}`, expected one of: `inputBinding`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -8538,7 +8697,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8569,11 +8728,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while 
line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -8581,12 +8741,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8601,7 +8763,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8907,7 +9069,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8921,7 +9085,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputRecordField'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputRecordField'", None, _errors__ + ) _constructed = cls( doc=doc, name=name, @@ -8946,7 +9112,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8977,11 +9143,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -8989,12 +9156,14 @@ def save( 
relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9009,7 +9178,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9132,7 +9301,9 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9234,7 +9405,9 @@ def save( ) -class CommandInputRecordSchema(InputRecordSchema, CommandInputSchema, CommandLineBindable): +class CommandInputRecordSchema( + InputRecordSchema, CommandInputSchema, CommandLineBindable +): def __init__( self, type: Any, @@ -9275,7 +9448,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.doc, self.name, self.inputBinding)) + return hash( + (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) + ) @classmethod def fromDoc( @@ -9408,7 +9583,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9422,7 +9599,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputRecordSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, 
_errors__ + ) _constructed = cls( fields=fields, type=type, @@ -9443,7 +9622,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9474,11 +9653,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -9486,12 +9666,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9506,7 +9688,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9820,7 +10002,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9834,7 +10018,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputEnumSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ + ) _constructed = cls( symbols=symbols, type=type, @@ -9855,7 +10041,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: 
Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9886,11 +10072,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -9898,12 +10085,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9918,7 +10107,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9936,7 +10125,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10046,7 +10237,9 @@ def save( attrs = frozenset(["symbols", "type", "label", "doc", "name", "inputBinding"]) -class CommandInputArraySchema(InputArraySchema, CommandInputSchema, CommandLineBindable): +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable +): def __init__( self, items: Any, @@ -10087,7 +10280,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.doc, self.name, self.inputBinding)) + return hash( + 
(self.items, self.type, self.label, self.doc, self.name, self.inputBinding) + ) @classmethod def fromDoc( @@ -10217,7 +10412,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10231,7 +10428,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputArraySchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputArraySchema'", None, _errors__ + ) _constructed = cls( items=items, type=type, @@ -10252,7 +10451,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10283,11 +10482,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -10295,12 +10495,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10315,7 +10517,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10676,7 +10878,9 @@ def fromDoc( for k in _doc.keys(): if k not in 
cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10690,7 +10894,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputRecordField'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) _constructed = cls( doc=doc, name=name, @@ -10713,7 +10919,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10744,11 +10950,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -10756,12 +10963,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10776,7 +10985,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10899,7 +11108,9 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.format, str(self.name), True, None, 
relative_uris + ) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11110,7 +11321,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11124,7 +11337,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputRecordSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputRecordSchema'", None, _errors__ + ) _constructed = cls( fields=fields, type=type, @@ -11144,7 +11359,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11175,11 +11390,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -11187,12 +11403,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11207,7 +11425,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11470,7 +11688,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in 
k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11484,7 +11704,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputEnumSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputEnumSchema'", None, _errors__ + ) _constructed = cls( symbols=symbols, type=type, @@ -11504,7 +11726,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11535,11 +11757,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -11547,12 +11770,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11567,7 +11792,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11585,7 +11810,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) 
r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11824,7 +12051,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11838,7 +12067,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputArraySchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) _constructed = cls( items=items, type=type, @@ -11858,7 +12089,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11889,11 +12120,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -11901,12 +12133,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11921,7 +12155,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -12331,7 +12565,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = 
expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12371,7 +12607,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -12387,7 +12623,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -12417,11 +12653,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -12429,12 +12666,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -12449,7 +12688,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12927,7 +13166,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12941,7 +13182,9 @@ def fromDoc( 
break if _errors__: - raise ValidationException("Trying 'CommandOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -12964,7 +13207,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -12980,7 +13223,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -13010,11 +13253,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -13022,12 +13266,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13042,7 +13288,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13635,7 +13881,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: 
_errors__.append( @@ -13680,7 +13928,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13696,7 +13944,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -13728,11 +13976,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -13740,12 +13989,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13760,7 +14011,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13904,7 +14155,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14357,7 +14610,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = 
expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14391,7 +14646,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14424,11 +14679,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14436,12 +14692,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14456,7 +14714,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -14675,12 +14933,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format(k), + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -14702,7 +14964,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - 
shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14735,11 +14997,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14747,12 +15010,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14767,7 +15032,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -14904,7 +15169,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14935,7 +15202,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14966,11 +15233,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14978,12 
+15246,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14998,7 +15268,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.package is not None and "package" not in r: r["package"] = save( @@ -15179,7 +15449,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15210,7 +15482,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15241,11 +15513,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15253,12 +15526,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15273,7 +15548,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if 
self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -15417,19 +15692,25 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format(k), + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException("Trying 'InitialWorkDirRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) _constructed = cls( listing=listing, extension_fields=extension_fields, @@ -15444,7 +15725,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15477,11 +15758,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15489,12 +15771,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15509,7 +15793,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.listing is not None 
and "listing" not in r: r["listing"] = save( @@ -15613,12 +15897,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format(k), + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -15640,7 +15928,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15673,11 +15961,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15685,12 +15974,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15705,7 +15996,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -15797,7 +16088,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) 
extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15809,7 +16102,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ShellCommandRequirement'", None, _errors__) + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -15823,7 +16118,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15856,11 +16151,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15868,12 +16164,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15888,7 +16186,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -16154,7 +16452,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16190,7 +16490,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, 
+ shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16223,11 +16523,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16235,12 +16536,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16255,7 +16558,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -16483,7 +16786,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, WorkReuse): - return bool(self.class_ == other.class_ and self.enableReuse == other.enableReuse) + return bool( + self.class_ == other.class_ and self.enableReuse == other.enableReuse + ) return False def __hash__(self) -> int: @@ -16525,12 +16830,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format(k), + "invalid field `{}`, expected one of: `class`, `enableReuse`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -16552,7 +16861,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: 
Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16585,11 +16894,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16597,12 +16907,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16617,7 +16929,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.enableReuse is not None and "enableReuse" not in r: r["enableReuse"] = save( @@ -16692,7 +17004,10 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, NetworkAccess): - return bool(self.class_ == other.class_ and self.networkAccess == other.networkAccess) + return bool( + self.class_ == other.class_ + and self.networkAccess == other.networkAccess + ) return False def __hash__(self) -> int: @@ -16734,7 +17049,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16763,7 +17080,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16796,11 +17113,12 @@ def 
save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16808,12 +17126,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16828,7 +17148,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.networkAccess is not None and "networkAccess" not in r: r["networkAccess"] = save( @@ -16918,7 +17238,10 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InplaceUpdateRequirement): - return bool(self.class_ == other.class_ and self.inplaceUpdate == other.inplaceUpdate) + return bool( + self.class_ == other.class_ + and self.inplaceUpdate == other.inplaceUpdate + ) return False def __hash__(self) -> int: @@ -16960,7 +17283,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16974,7 +17299,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'InplaceUpdateRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InplaceUpdateRequirement'", None, _errors__ + ) _constructed = cls( inplaceUpdate=inplaceUpdate, extension_fields=extension_fields, @@ -16989,7 +17316,7 @@ def save( relative_uris: bool = True, keys: 
Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17022,11 +17349,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17034,12 +17362,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17054,7 +17384,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.inplaceUpdate is not None and "inplaceUpdate" not in r: r["inplaceUpdate"] = save( @@ -17121,7 +17451,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, ToolTimeLimit): - return bool(self.class_ == other.class_ and self.timelimit == other.timelimit) + return bool( + self.class_ == other.class_ and self.timelimit == other.timelimit + ) return False def __hash__(self) -> int: @@ -17163,12 +17495,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format(k), + "invalid field `{}`, expected one of: `class`, `timelimit`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -17190,7 
+17526,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17223,11 +17559,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17235,12 +17572,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17255,7 +17594,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.timelimit is not None and "timelimit" not in r: r["timelimit"] = save( @@ -17495,7 +17834,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17509,7 +17850,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ExpressionToolOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'ExpressionToolOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -17531,7 +17874,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> 
CommentedMap: if keys is None: keys = [] @@ -17547,7 +17890,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17577,11 +17920,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17589,12 +17933,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17609,7 +17955,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17755,7 +18101,9 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"]) + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) class WorkflowInputParameter(InputParameter): @@ -18051,7 +18399,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18065,7 +18415,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 
'WorkflowInputParameter'", None, _errors__) + raise ValidationException( + "Trying 'WorkflowInputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -18091,7 +18443,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -18107,7 +18459,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18137,11 +18489,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -18149,12 +18502,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -18169,7 +18524,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18677,7 +19032,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18715,7 +19072,7 @@ def save( relative_uris: 
bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -18731,7 +19088,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18763,11 +19120,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -18775,12 +19133,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -18795,7 +19155,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18939,7 +19299,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19256,7 +19618,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) 
extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19270,7 +19634,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -19294,7 +19660,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -19310,7 +19676,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -19340,11 +19706,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -19352,12 +19719,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19372,7 +19741,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19489,7 +19858,9 @@ def save( shift=shift, ) if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri(self.outputSource, str(self.id), False, 1, relative_uris) + u = 
save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) r["outputSource"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19850,7 +20221,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19887,7 +20260,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -19903,7 +20276,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -19933,11 +20306,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -19945,12 +20319,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19965,7 +20341,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20228,7 +20604,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in 
k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -20256,7 +20634,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -20272,7 +20650,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -20302,11 +20680,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -20314,12 +20693,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -20334,7 +20715,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20633,7 +21014,7 @@ def fromDoc( else: hints = None - subscope_baseuri = expand_url("run", baseuri, loadingOptions, True) + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) try: run = load_field( _doc.get("run"), @@ -20689,7 +21070,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = 
expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -20728,7 +21111,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -20744,7 +21127,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -20774,11 +21157,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -20786,12 +21170,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -20806,7 +21192,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20974,7 +21360,9 @@ def save( shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri(self.scatterMethod, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) r["scatterMethod"] = u max_len, inserted_line_info = 
add_kv( old_doc=doc, @@ -21312,7 +21700,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21350,7 +21740,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -21366,7 +21756,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -21398,11 +21788,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -21410,12 +21801,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -21430,7 +21823,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21574,7 +21967,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + 
self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21686,7 +22081,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21698,7 +22095,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'SubworkflowFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'SubworkflowFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -21712,7 +22111,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -21745,11 +22144,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -21757,12 +22157,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -21777,7 +22179,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -21843,7 +22245,9 @@ def fromDoc( for k in _doc.keys(): if k not in 
cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21855,7 +22259,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ScatterFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -21869,7 +22275,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -21902,11 +22308,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -21914,12 +22321,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -21934,7 +22343,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -22000,7 +22409,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] 
else: _errors__.append( @@ -22012,7 +22423,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'MultipleInputFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22026,7 +22439,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -22059,11 +22472,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -22071,12 +22485,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -22091,7 +22507,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -22157,7 +22573,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22169,7 +22587,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'StepInputExpressionRequirement'", None, _errors__) + raise ValidationException( + 
"Trying 'StepInputExpressionRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22183,7 +22603,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -22216,11 +22636,12 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -22228,12 +22649,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -22248,7 +22671,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -22883,14 +23306,16 @@ def save( array_of_union_of_FileLoader_or_DirectoryLoader, ) ) -secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = ( - _SecondaryDSLLoader(union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader) +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader ) uri_union_of_None_type_or_strtype_True_False_None = _URILoader( union_of_None_type_or_strtype, True, False, None ) Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = 
_URILoader(Directory_classLoader, False, True, None) +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) union_of_None_type_or_booltype = _UnionLoader( ( None_type, @@ -22904,13 +23329,11 @@ def save( ) ) array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) -union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = ( - _UnionLoader( - ( - None_type, - SecondaryFileSchemaLoader, - array_of_SecondaryFileSchemaLoader, - ) +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( + ( + None_type, + SecondaryFileSchemaLoader, + array_of_SecondaryFileSchemaLoader, ) ) secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( @@ -22924,13 +23347,11 @@ def save( ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = ( - _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, - ) +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, ) union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( ( @@ -23028,15 +23449,13 @@ def save( WorkflowInputParameterLoader, ) ) -array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _ArrayLoader( - union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( + _ArrayLoader(union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader) ) -idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( - _IdMapLoader( - array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, - "id", - 
"type", - ) +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _IdMapLoader( + array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, + "id", + "type", ) union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _UnionLoader( ( @@ -23240,8 +23659,10 @@ def save( array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) ) union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -23282,8 +23703,10 @@ def save( array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) ) union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -23335,15 +23758,13 @@ def save( CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader( - union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + 
_ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - ) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, ) ) array_of_inttype = _ArrayLoader(inttype) @@ -23353,11 +23774,15 @@ def save( array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader(("DockerRequirement",), "DockerRequirement_class") +DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) uri_DockerRequirement_classLoader_False_True_None = _URILoader( DockerRequirement_classLoader, False, True, None ) -SoftwareRequirement_classLoader = _EnumLoader(("SoftwareRequirement",), "SoftwareRequirement_class") +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( SoftwareRequirement_classLoader, False, True, None ) @@ -23393,7 +23818,9 @@ def save( ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader(("EnvVarRequirement",), "EnvVarRequirement_class") +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( EnvVarRequirement_classLoader, False, True, None ) @@ -23407,12 +23834,16 @@ def save( uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( ShellCommandRequirement_classLoader, False, True, None ) -ResourceRequirement_classLoader = _EnumLoader(("ResourceRequirement",), "ResourceRequirement_class") +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) 
uri_ResourceRequirement_classLoader_False_True_None = _URILoader( ResourceRequirement_classLoader, False, True, None ) WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") -uri_WorkReuse_classLoader_False_True_None = _URILoader(WorkReuse_classLoader, False, True, None) +uri_WorkReuse_classLoader_False_True_None = _URILoader( + WorkReuse_classLoader, False, True, None +) union_of_booltype_or_ExpressionLoader = _UnionLoader( ( booltype, @@ -23453,7 +23884,9 @@ def save( idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( array_of_WorkflowInputParameterLoader, "id", "type" ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) @@ -23485,11 +23918,13 @@ def save( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) ) array_of_Any_type = _ArrayLoader(Any_type) union_of_None_type_or_array_of_Any_type = _UnionLoader( @@ -23501,12 +23936,14 @@ def save( idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, 
+union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) ) ) uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( @@ -23528,7 +23965,9 @@ def save( union_of_None_type_or_ScatterMethodLoader, False, True, None ) Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader(Workflow_classLoader, False, True, None) +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, True, None +) array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" @@ -23569,8 +24008,10 @@ def save( WorkflowLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader + ) ) union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_2.py b/schema_salad/tests/cwl_v1_2.py index 6e482dbd4..331864dde 100644 --- a/schema_salad/tests/cwl_v1_2.py +++ b/schema_salad/tests/cwl_v1_2.py @@ -1290,9 +1290,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1536,9 +1537,10 @@ 
def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -1804,9 +1806,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -2043,9 +2046,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -2565,9 +2569,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3089,9 +3094,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3365,9 +3371,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -3766,9 +3773,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] 
+ shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4225,9 +4233,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4589,9 +4598,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -4949,9 +4959,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -5364,9 +5375,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -5771,9 +5783,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6135,9 +6148,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if 
inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6495,9 +6509,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -6812,9 +6827,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7028,9 +7044,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7257,9 +7274,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7487,9 +7505,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -7703,9 +7722,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val 
= save( getattr(self, key), top=False, @@ -8099,9 +8119,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -8509,9 +8530,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -8764,9 +8786,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -9178,9 +9201,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -9687,9 +9711,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -10105,9 +10130,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -10514,9 +10540,10 @@ def save( if 
getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -10975,9 +11002,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -11414,9 +11442,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -11780,9 +11809,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -12142,9 +12172,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -12668,9 +12699,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -13267,9 +13299,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + 
shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14012,9 +14045,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -14730,9 +14764,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15047,9 +15082,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15282,9 +15318,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15565,9 +15602,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -15810,9 +15848,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if 
inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16012,9 +16051,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16201,9 +16241,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16577,9 +16618,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -16947,9 +16989,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17165,9 +17208,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17400,9 +17444,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 
1 saved_val = save( getattr(self, key), top=False, @@ -17609,9 +17654,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -17969,9 +18015,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -18537,9 +18584,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -19190,9 +19238,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -19817,9 +19866,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -20526,9 +20576,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -20921,9 +20972,10 @@ def 
save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -21444,9 +21496,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -22125,9 +22178,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -22496,9 +22550,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -22659,9 +22714,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -22822,9 +22878,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -22985,9 +23042,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = 
doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -23398,9 +23456,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -23953,9 +24012,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, @@ -24490,9 +24550,10 @@ def save( if getattr(self, key) is not None: if key != 'class': line = doc.lc.data[key][0] + shift - while line in inserted_line_info: - line += 1 - shift += 1 + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 saved_val = save( getattr(self, key), top=False, diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 30ca0dd10..ea090d2f9 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -118,14 +118,11 @@ def load_document_by_uri(path: str) -> Any: """ Takes in a path and loads it via the python codegen. 
""" - if isinstance(path, str): - uri = urlparse(path) - if not uri.scheme or uri.scheme == "file": - real_path = Path(unquote_plus(uri.path)).resolve().as_uri() - else: - real_path = path + uri = urlparse(path) + if not uri.scheme or uri.scheme == "file": + real_path = Path(unquote_plus(uri.path)).resolve().as_uri() else: - real_path = path.resolve().as_uri() + real_path = path baseuri = str(real_path) From f85ed3cb103bd4808fc8eb47e1569f9472489b01 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Mon, 5 Jun 2023 16:55:52 -0600 Subject: [PATCH 38/44] running make cleanup --- schema_salad/metaschema.py | 248 ++---- schema_salad/tests/cwl_v1_0.py | 1139 ++++++++++----------------- schema_salad/tests/cwl_v1_1.py | 1347 ++++++++++++-------------------- 3 files changed, 986 insertions(+), 1748 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 04b472525..8e57efa31 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -1137,9 +1137,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type + self.doc == other.doc and self.name == other.name and self.type == other.type ) return False @@ -1223,16 +1221,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1257,7 +1251,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if 
keys is None: keys = [] @@ -1288,7 +1282,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1301,14 +1295,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1323,7 +1315,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1472,16 +1464,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `fields`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1504,7 +1492,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1535,7 +1523,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1548,14 +1536,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - 
shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1570,7 +1556,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1739,9 +1725,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -1773,7 +1757,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1804,7 +1788,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1817,14 +1801,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1839,7 +1821,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1857,9 +1839,7 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), 
True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1981,16 +1961,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `items`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -2013,7 +1989,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -2044,7 +2020,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2057,14 +2033,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2079,7 +2053,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) @@ -2422,9 +2396,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, 
scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2463,7 +2435,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -2494,7 +2466,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2507,14 +2479,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2529,7 +2499,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) @@ -2860,9 +2830,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2892,7 +2860,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -2923,7 +2891,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2936,14 +2904,12 @@ def save( 
relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2958,12 +2924,10 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.specializeFrom is not None and "specializeFrom" not in r: - u = save_relative_uri( - self.specializeFrom, base_url, False, 1, relative_uris - ) + u = save_relative_uri(self.specializeFrom, base_url, False, 1, relative_uris) r["specializeFrom"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3063,9 +3027,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.doc, self.name, self.type, self.jsonldPredicate, self.default) - ) + return hash((self.doc, self.name, self.type, self.jsonldPredicate, self.default)) @classmethod def fromDoc( @@ -3180,9 +3142,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3216,7 +3176,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -3247,7 +3207,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3260,14 +3220,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + 
shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3282,7 +3240,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3732,9 +3690,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3776,7 +3732,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -3807,7 +3763,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3820,14 +3776,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3842,7 +3796,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3944,9 +3898,7 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri( 
- self.docParent, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) r["docParent"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3961,9 +3913,7 @@ def save( shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri( - self.docChild, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3978,9 +3928,7 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri( - self.docAfter, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4416,9 +4364,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4458,7 +4404,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -4489,7 +4435,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4502,14 +4448,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 
1: saved_val = saved_val[0] r[key] = saved_val @@ -4524,7 +4468,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4563,9 +4507,7 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4622,9 +4564,7 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri( - self.docParent, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) r["docParent"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4639,9 +4579,7 @@ def save( shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri( - self.docChild, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4656,9 +4594,7 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri( - self.docAfter, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4966,9 +4902,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5004,7 +4938,7 @@ def 
save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -5035,7 +4969,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5048,14 +4982,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5070,7 +5002,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5130,9 +5062,7 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri( - self.docParent, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) r["docParent"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5147,9 +5077,7 @@ def save( shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri( - self.docChild, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5164,9 +5092,7 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri( - self.docAfter, str(self.name), False, None, relative_uris - ) + u = save_relative_uri(self.docAfter, str(self.name), 
False, None, relative_uris) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5210,9 +5136,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"] - ) + attrs = frozenset(["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"]) _vocab = { @@ -5444,17 +5368,15 @@ def save( ) Documentation_nameLoader = _EnumLoader(("documentation",), "Documentation_name") typedsl_Documentation_nameLoader_2 = _TypeDSLLoader(Documentation_nameLoader, 2) -union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = ( - _UnionLoader( - ( - SaladRecordSchemaLoader, - SaladEnumSchemaLoader, - DocumentationLoader, - ) +union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader( + ( + SaladRecordSchemaLoader, + SaladEnumSchemaLoader, + DocumentationLoader, ) ) -array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _ArrayLoader( - union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader +array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = ( + _ArrayLoader(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader) ) union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_0.py b/schema_salad/tests/cwl_v1_0.py index 60898f1e6..647680aa5 100644 --- a/schema_salad/tests/cwl_v1_0.py +++ b/schema_salad/tests/cwl_v1_0.py @@ -1133,9 +1133,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.name == other.name - and self.doc == other.doc - and self.type == other.type + self.name == other.name and self.doc == other.doc and self.type == other.type ) 
return False @@ -1219,16 +1217,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `name`, `doc`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1253,7 +1247,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1284,7 +1278,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1297,14 +1291,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1319,7 +1311,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1468,16 +1460,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, 
`type`".format( - k - ), + "invalid field `{}`, expected one of: `fields`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1500,7 +1488,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1531,7 +1519,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1544,14 +1532,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1566,7 +1552,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1702,16 +1688,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `symbols`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1734,7 +1716,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1765,7 +1747,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, 
key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1778,14 +1760,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1800,7 +1780,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1925,16 +1905,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `items`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1957,7 +1933,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1988,7 +1964,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2001,14 +1977,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) 
== list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2023,7 +1997,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -2443,9 +2417,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2484,7 +2456,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -2517,7 +2489,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2530,14 +2502,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2552,7 +2522,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2878,9 +2848,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) + return hash((self.class_, self.location, self.path, self.basename, self.listing)) 
@classmethod def fromDoc( @@ -2975,9 +2943,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3009,7 +2975,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -3042,7 +3008,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3055,14 +3021,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3077,7 +3041,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3345,9 +3309,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3381,7 +3343,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -3412,7 +3374,7 @@ def 
save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3425,14 +3387,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3447,7 +3407,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3692,9 +3652,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3727,7 +3685,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -3758,7 +3716,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3771,14 +3729,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3793,7 +3749,7 
@@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4035,9 +3991,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4071,7 +4025,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -4102,7 +4056,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4115,14 +4069,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4137,7 +4089,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4155,9 +4107,7 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4366,9 +4316,7 @@ def fromDoc( 
for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4400,7 +4348,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -4431,7 +4379,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4444,14 +4392,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4466,7 +4412,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -4696,9 +4642,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4731,7 +4675,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -4762,7 +4706,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != 
"class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4775,14 +4719,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4797,7 +4739,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4991,9 +4933,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5024,7 +4964,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -5055,7 +4995,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5068,14 +5008,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5090,7 +5028,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if 
self.fields is not None and "fields" not in r: r["fields"] = save( @@ -5287,9 +5225,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5321,7 +5257,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -5352,7 +5288,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5365,14 +5301,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5387,7 +5321,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -5599,9 +5533,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5633,7 +5565,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: 
keys = [] @@ -5664,7 +5596,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5677,14 +5609,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5699,7 +5629,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -6049,9 +5979,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6089,7 +6017,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -6105,7 +6033,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -6135,7 +6063,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6148,14 +6076,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a 
list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6170,7 +6096,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6581,9 +6507,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6619,7 +6543,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -6635,7 +6559,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -6665,7 +6589,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6678,14 +6602,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6700,7 +6622,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, 
True, None, relative_uris) @@ -6913,10 +6835,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InlineJavascriptRequirement): - return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) + return bool(self.class_ == other.class_ and self.expressionLib == other.expressionLib) return False def __hash__(self) -> int: @@ -6961,9 +6880,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6977,9 +6894,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'InlineJavascriptRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InlineJavascriptRequirement'", None, _errors__) _constructed = cls( expressionLib=expressionLib, extension_fields=extension_fields, @@ -6994,7 +6909,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -7027,7 +6942,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7040,14 +6955,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7062,7 +6975,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.expressionLib is 
not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -7171,16 +7084,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `types`".format(k), SourceLine(_doc, k, str), ) ) @@ -7202,7 +7111,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -7235,7 +7144,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7248,14 +7157,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7270,7 +7177,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.types is not None and "types" not in r: r["types"] = save( @@ -7334,9 +7241,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, EnvironmentDef): - return bool( - self.envName == other.envName and self.envValue == other.envValue - ) + return bool(self.envName == other.envName and self.envValue == other.envValue) return False def __hash__(self) -> int: @@ -7389,16 +7294,12 @@ def fromDoc( for k in _doc.keys(): if k not 
in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), + "invalid field `{}`, expected one of: `envName`, `envValue`".format(k), SourceLine(_doc, k, str), ) ) @@ -7421,7 +7322,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -7452,7 +7353,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7465,14 +7366,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7487,7 +7386,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7781,9 +7680,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -7818,7 +7715,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: 
if keys is None: keys = [] @@ -7849,7 +7746,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7862,14 +7759,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7884,7 +7779,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8174,9 +8069,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8207,7 +8100,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -8238,7 +8131,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8251,14 +8144,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = 
saved_val @@ -8273,7 +8164,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.glob is not None and "glob" not in r: r["glob"] = save( @@ -8503,9 +8394,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8519,9 +8408,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordField'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputRecordField'", None, _errors__) _constructed = cls( name=name, doc=doc, @@ -8541,7 +8428,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -8572,7 +8459,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8585,14 +8472,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8607,7 +8492,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -8852,9 +8737,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", 
loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8868,9 +8751,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputRecordSchema'", None, _errors__) _constructed = cls( fields=fields, type=type, @@ -8889,7 +8770,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -8920,7 +8801,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8933,14 +8814,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8955,7 +8834,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9197,9 +9076,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9213,9 +9090,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputEnumSchema'", None, _errors__ - ) + 
raise ValidationException("Trying 'CommandInputEnumSchema'", None, _errors__) _constructed = cls( symbols=symbols, type=type, @@ -9235,7 +9110,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -9266,7 +9141,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9279,14 +9154,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9301,7 +9174,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9319,9 +9192,7 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9530,9 +9401,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9546,9 +9415,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputArraySchema'", None, _errors__ - ) 
+ raise ValidationException("Trying 'CommandInputArraySchema'", None, _errors__) _constructed = cls( items=items, type=type, @@ -9566,7 +9433,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -9597,7 +9464,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9610,14 +9477,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9632,7 +9497,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -9862,9 +9727,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9878,9 +9741,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordField'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputRecordField'", None, _errors__) _constructed = cls( name=name, doc=doc, @@ -9899,7 +9760,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -9930,7 +9791,7 @@ def save( if 
isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9943,14 +9804,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9965,7 +9824,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10189,9 +10048,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10205,9 +10062,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputRecordSchema'", None, _errors__) _constructed = cls( fields=fields, type=type, @@ -10226,7 +10081,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -10257,7 +10112,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10270,14 +10125,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], 
inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10292,7 +10145,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10504,9 +10357,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10520,9 +10371,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputEnumSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputEnumSchema'", None, _errors__) _constructed = cls( symbols=symbols, type=type, @@ -10540,7 +10389,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -10571,7 +10420,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10584,14 +10433,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10606,7 +10453,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -10818,9 +10665,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10834,9 +10679,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputArraySchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputArraySchema'", None, _errors__) _constructed = cls( items=items, type=type, @@ -10854,7 +10697,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -10885,7 +10728,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10898,14 +10741,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10920,7 +10761,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -11274,9 +11115,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, 
scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11314,7 +11153,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -11330,7 +11169,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -11360,7 +11199,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11373,14 +11212,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11395,7 +11232,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -11832,9 +11669,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11848,9 +11683,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputParameter'", None, 
_errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -11873,7 +11706,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -11889,7 +11722,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -11919,7 +11752,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11932,14 +11765,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11954,7 +11785,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12547,9 +12378,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12594,7 +12423,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -12610,7 +12439,7 @@ def save( if doc: if self.id: temp_id = self.id - if 
len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -12642,7 +12471,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -12655,14 +12484,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -12677,7 +12504,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12821,9 +12648,7 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13258,9 +13083,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13294,7 +13117,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -13327,7 +13150,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not 
None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13340,14 +13163,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13362,7 +13183,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -13581,16 +13402,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `packages`".format(k), SourceLine(_doc, k, str), ) ) @@ -13612,7 +13429,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -13645,7 +13462,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13658,14 +13475,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) 
== 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13680,7 +13495,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -13817,9 +13632,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13850,7 +13663,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -13881,7 +13694,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13894,14 +13707,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13916,7 +13727,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.package is not None and "package" not in r: r["package"] = save( @@ -14097,9 +13908,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14130,7 +13939,7 @@ def save( 
relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -14161,7 +13970,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14174,14 +13983,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14196,7 +14003,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -14340,25 +14147,19 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `listing`".format(k), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException( - "Trying 'InitialWorkDirRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InitialWorkDirRequirement'", None, _errors__) _constructed = cls( listing=listing, extension_fields=extension_fields, @@ -14373,7 +14174,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> 
CommentedMap: if keys is None: keys = [] @@ -14406,7 +14207,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14419,14 +14220,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14441,7 +14240,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -14545,16 +14344,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `envDef`".format(k), SourceLine(_doc, k, str), ) ) @@ -14576,7 +14371,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -14609,7 +14404,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14622,14 +14417,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + 
shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14644,7 +14437,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -14736,9 +14529,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14750,9 +14541,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ShellCommandRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'ShellCommandRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -14766,7 +14555,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -14799,7 +14588,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14812,14 +14601,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14834,7 +14621,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -15100,9 +14887,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15138,7 +14923,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -15171,7 +14956,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15184,14 +14969,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15206,7 +14989,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -15630,9 +15413,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15646,9 +15427,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ExpressionToolOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'ExpressionToolOutputParameter'", 
None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -15671,7 +15450,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -15687,7 +15466,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -15717,7 +15496,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15730,14 +15509,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15752,7 +15529,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16188,9 +15965,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16228,7 +16003,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -16244,7 +16019,7 @@ def save( if doc: if self.id: temp_id = self.id - if 
len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -16276,7 +16051,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16289,14 +16064,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16311,7 +16084,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16455,9 +16228,7 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16798,9 +16569,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16814,9 +16583,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'WorkflowOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -16841,7 +16608,7 @@ def save( relative_uris: bool = 
True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -16857,7 +16624,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -16887,7 +16654,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16900,14 +16667,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16922,7 +16687,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17060,9 +16825,7 @@ def save( shift=shift, ) if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri( - self.outputSource, str(self.id), False, 1, relative_uris - ) + u = save_relative_uri(self.outputSource, str(self.id), False, 1, relative_uris) r["outputSource"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -17228,9 +16991,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.source, self.linkMerge, self.id, self.default, self.valueFrom) - ) + return hash((self.source, self.linkMerge, self.id, self.default, self.valueFrom)) @classmethod def fromDoc( @@ -17348,9 +17109,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: 
- ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17384,7 +17143,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -17400,7 +17159,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17430,7 +17189,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17443,14 +17202,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17465,7 +17222,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17650,9 +17407,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17680,7 +17435,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: 
int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -17696,7 +17451,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17726,7 +17481,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17739,14 +17494,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17761,7 +17514,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18114,9 +17867,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18155,7 +17906,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -18171,7 +17922,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18201,7 +17952,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - 
if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -18214,14 +17965,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18236,7 +17985,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18404,9 +18153,7 @@ def save( shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri( - self.scatterMethod, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.scatterMethod, str(self.id), False, None, relative_uris) r["scatterMethod"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18744,9 +18491,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18784,7 +18529,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -18800,7 +18545,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18832,7 +18577,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + 
if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -18845,14 +18590,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18867,7 +18610,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19011,9 +18754,7 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19125,9 +18866,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19139,9 +18878,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'SubworkflowFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'SubworkflowFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19155,7 +18892,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -19188,7 +18925,7 @@ def save( if isinstance(key, str): if 
hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19201,14 +18938,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19223,7 +18958,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -19289,9 +19024,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19303,9 +19036,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ScatterFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'ScatterFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19319,7 +19050,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -19352,7 +19083,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19365,14 +19096,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the 
returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19387,7 +19116,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -19453,9 +19182,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19467,9 +19194,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'MultipleInputFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'MultipleInputFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19483,7 +19208,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -19516,7 +19241,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19529,14 +19254,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19551,7 +19274,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top 
refers to the directory level @@ -19617,9 +19340,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19631,9 +19352,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'StepInputExpressionRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'StepInputExpressionRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19647,7 +19366,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -19680,7 +19399,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19693,14 +19412,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19715,7 +19432,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -20244,18 +19961,14 @@ def save( union_of_None_type_or_strtype, True, False, None ) Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader( - Directory_classLoader, False, True, None -) +uri_Directory_classLoader_False_True_None = 
_URILoader(Directory_classLoader, False, True, None) union_of_strtype_or_ExpressionLoader = _UnionLoader( ( strtype, ExpressionLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader( - union_of_strtype_or_ExpressionLoader -) +array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader(union_of_strtype_or_ExpressionLoader) union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader = _UnionLoader( ( None_type, @@ -20369,11 +20082,13 @@ def save( ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = ( + _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, + ) ) union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( ( @@ -20504,17 +20219,17 @@ def save( uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( SchemaDefRequirement_classLoader, False, True, None ) -union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( - _UnionLoader( - ( - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - ) +union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _UnionLoader( + ( + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, ) ) -array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _ArrayLoader( - union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader +array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( + _ArrayLoader( + union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader + ) ) 
union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( ( @@ -20557,10 +20272,8 @@ def save( array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" - ) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" ) union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -20595,10 +20308,8 @@ def save( array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" - ) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" ) union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -20651,13 +20362,15 @@ def save( CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader( + union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - 
array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) ) ) array_of_inttype = _ArrayLoader(inttype) @@ -20667,15 +20380,11 @@ def save( array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader( - ("DockerRequirement",), "DockerRequirement_class" -) +DockerRequirement_classLoader = _EnumLoader(("DockerRequirement",), "DockerRequirement_class") uri_DockerRequirement_classLoader_False_True_None = _URILoader( DockerRequirement_classLoader, False, True, None ) -SoftwareRequirement_classLoader = _EnumLoader( - ("SoftwareRequirement",), "SoftwareRequirement_class" -) +SoftwareRequirement_classLoader = _EnumLoader(("SoftwareRequirement",), "SoftwareRequirement_class") uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( SoftwareRequirement_classLoader, False, True, None ) @@ -20692,17 +20401,21 @@ def save( uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( InitialWorkDirRequirement_classLoader, False, True, None ) -union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _UnionLoader( - ( - FileLoader, - DirectoryLoader, - DirentLoader, - strtype, - ExpressionLoader, +union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = ( + _UnionLoader( + ( + FileLoader, + DirectoryLoader, + DirentLoader, + strtype, + ExpressionLoader, + ) ) ) -array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _ArrayLoader( - union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader +array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = ( + _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader + ) ) 
union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader = _UnionLoader( ( @@ -20711,9 +20424,7 @@ def save( ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader( - ("EnvVarRequirement",), "EnvVarRequirement_class" -) +EnvVarRequirement_classLoader = _EnumLoader(("EnvVarRequirement",), "EnvVarRequirement_class") uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( EnvVarRequirement_classLoader, False, True, None ) @@ -20727,9 +20438,7 @@ def save( uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( ShellCommandRequirement_classLoader, False, True, None ) -ResourceRequirement_classLoader = _EnumLoader( - ("ResourceRequirement",), "ResourceRequirement_class" -) +ResourceRequirement_classLoader = _EnumLoader(("ResourceRequirement",), "ResourceRequirement_class") uri_ResourceRequirement_classLoader_False_True_None = _URILoader( ResourceRequirement_classLoader, False, True, None ) @@ -20760,9 +20469,7 @@ def save( uri_ExpressionTool_classLoader_False_True_None = _URILoader( ExpressionTool_classLoader, False, True, None ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( - ExpressionToolOutputParameterLoader -) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) @@ -20794,13 +20501,11 @@ def save( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( - _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, - ) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader( + 
union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, ) array_of_Any_type = _ArrayLoader(Any_type) union_of_None_type_or_array_of_Any_type = _UnionLoader( @@ -20812,14 +20517,12 @@ def save( idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, ) ) uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( @@ -20841,9 +20544,7 @@ def save( union_of_None_type_or_ScatterMethodLoader, False, True, None ) Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader( - Workflow_classLoader, False, True, None -) +uri_Workflow_classLoader_False_True_None = _URILoader(Workflow_classLoader, False, True, None) array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" @@ -20884,10 +20585,8 @@ def save( WorkflowLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader - ) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader ) union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = 
_UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_1.py b/schema_salad/tests/cwl_v1_1.py index bdceca981..84b0ef910 100644 --- a/schema_salad/tests/cwl_v1_1.py +++ b/schema_salad/tests/cwl_v1_1.py @@ -1137,9 +1137,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type + self.doc == other.doc and self.name == other.name and self.type == other.type ) return False @@ -1223,16 +1221,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1257,7 +1251,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1288,7 +1282,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1301,14 +1295,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1323,7 +1315,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if 
self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1472,16 +1464,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `fields`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1504,7 +1492,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1535,7 +1523,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1548,14 +1536,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1570,7 +1556,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1706,16 +1692,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid 
field `{}`, expected one of: `symbols`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `symbols`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1738,7 +1720,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -1769,7 +1751,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1782,14 +1764,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -1804,7 +1784,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1929,16 +1909,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), + "invalid field `{}`, expected one of: `items`, `type`".format(k), SourceLine(_doc, k, str), ) ) @@ -1961,7 +1937,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ 
-1992,7 +1968,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2005,14 +1981,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2027,7 +2001,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.items is not None and "items" not in r: r["items"] = save( @@ -2447,9 +2421,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2488,7 +2460,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -2521,7 +2493,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2534,14 +2506,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -2556,7 +2526,7 @@ def save( 
min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2882,9 +2852,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) + return hash((self.class_, self.location, self.path, self.basename, self.listing)) @classmethod def fromDoc( @@ -2979,9 +2947,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3013,7 +2979,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -3046,7 +3012,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3059,14 +3025,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3081,7 +3045,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3261,16 +3225,12 @@ def fromDoc( for k in 
_doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`".format( - k - ), + "invalid field `{}`, expected one of: `loadContents`".format(k), SourceLine(_doc, k, str), ) ) @@ -3292,7 +3252,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -3323,7 +3283,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3336,14 +3296,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3358,7 +3316,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3654,9 +3612,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3694,7 +3650,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 
0, ) -> CommentedMap: if keys is None: keys = [] @@ -3725,7 +3681,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3738,14 +3694,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -3760,7 +3714,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3883,9 +3837,7 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4118,9 +4070,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4154,7 +4104,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -4185,7 +4135,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while 
line in inserted_line_info: @@ -4198,14 +4148,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4220,7 +4168,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4483,9 +4431,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4519,7 +4465,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -4550,7 +4496,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4563,14 +4509,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4585,7 +4529,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, 
base_url, True, None, relative_uris) @@ -4603,9 +4547,7 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4844,9 +4786,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4880,7 +4820,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -4911,7 +4851,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4924,14 +4864,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -4946,7 +4884,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5264,9 +5202,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, 
scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5302,7 +5238,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -5333,7 +5269,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5346,14 +5282,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5368,7 +5302,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5491,9 +5425,7 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5516,9 +5448,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] - ) + attrs = frozenset(["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"]) class OutputRecordSchema(RecordSchema, OutputSchema): @@ -5674,9 +5604,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, 
scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5710,7 +5638,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -5741,7 +5669,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5754,14 +5682,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -5776,7 +5702,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6039,9 +5965,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6075,7 +5999,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -6106,7 +6030,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if 
inserted_line_info: while line in inserted_line_info: @@ -6119,14 +6043,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6141,7 +6063,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6159,9 +6081,7 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6400,9 +6320,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6436,7 +6354,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -6467,7 +6385,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6480,14 +6398,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the 
value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6502,7 +6418,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6677,10 +6593,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InlineJavascriptRequirement): - return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) + return bool(self.class_ == other.class_ and self.expressionLib == other.expressionLib) return False def __hash__(self) -> int: @@ -6725,9 +6638,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6741,9 +6652,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'InlineJavascriptRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InlineJavascriptRequirement'", None, _errors__) _constructed = cls( expressionLib=expressionLib, extension_fields=extension_fields, @@ -6758,7 +6667,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -6791,7 +6700,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6804,14 +6713,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + 
shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -6826,7 +6733,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6939,16 +6846,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `types`".format(k), SourceLine(_doc, k, str), ) ) @@ -6970,7 +6873,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -7003,7 +6906,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7016,14 +6919,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7038,7 +6939,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.types is not None and "types" not in r: r["types"] = save( @@ 
-7095,9 +6996,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, SecondaryFileSchema): - return bool( - self.pattern == other.pattern and self.required == other.required - ) + return bool(self.pattern == other.pattern and self.required == other.required) return False def __hash__(self) -> int: @@ -7153,16 +7052,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `pattern`, `required`".format( - k - ), + "invalid field `{}`, expected one of: `pattern`, `required`".format(k), SourceLine(_doc, k, str), ) ) @@ -7185,7 +7080,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -7216,7 +7111,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7229,14 +7124,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7251,7 +7144,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.pattern is not None and "pattern" not in r: r["pattern"] = save( @@ -7334,9 +7227,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, 
LoadListingRequirement): - return bool( - self.class_ == other.class_ and self.loadListing == other.loadListing - ) + return bool(self.class_ == other.class_ and self.loadListing == other.loadListing) return False def __hash__(self) -> int: @@ -7381,25 +7272,19 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `loadListing`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `loadListing`".format(k), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException( - "Trying 'LoadListingRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'LoadListingRequirement'", None, _errors__) _constructed = cls( loadListing=loadListing, extension_fields=extension_fields, @@ -7414,7 +7299,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -7447,7 +7332,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7460,14 +7345,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7482,7 +7365,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if 
self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -7546,9 +7429,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, EnvironmentDef): - return bool( - self.envName == other.envName and self.envValue == other.envValue - ) + return bool(self.envName == other.envName and self.envValue == other.envValue) return False def __hash__(self) -> int: @@ -7601,16 +7482,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), + "invalid field `{}`, expected one of: `envName`, `envValue`".format(k), SourceLine(_doc, k, str), ) ) @@ -7633,7 +7510,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -7664,7 +7541,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7677,14 +7554,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -7699,7 +7574,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7993,9 +7868,7 @@ def fromDoc( 
for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8030,7 +7903,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -8061,7 +7934,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8074,14 +7947,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8096,7 +7967,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8407,9 +8278,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8441,7 +8310,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -8472,7 +8341,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 
'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8485,14 +8354,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -8507,7 +8374,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8666,16 +8533,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `inputBinding`".format( - k - ), + "invalid field `{}`, expected one of: `inputBinding`".format(k), SourceLine(_doc, k, str), ) ) @@ -8697,7 +8560,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -8728,7 +8591,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8741,14 +8604,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: 
saved_val = saved_val[0] r[key] = saved_val @@ -8763,7 +8624,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -9069,9 +8930,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9085,9 +8944,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordField'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputRecordField'", None, _errors__) _constructed = cls( doc=doc, name=name, @@ -9112,7 +8969,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -9143,7 +9000,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9156,14 +9013,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9178,7 +9033,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9301,9 +9156,7 @@ def save( shift=shift, ) if self.format is not 
None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9405,9 +9258,7 @@ def save( ) -class CommandInputRecordSchema( - InputRecordSchema, CommandInputSchema, CommandLineBindable -): +class CommandInputRecordSchema(InputRecordSchema, CommandInputSchema, CommandLineBindable): def __init__( self, type: Any, @@ -9448,9 +9299,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) - ) + return hash((self.fields, self.type, self.label, self.doc, self.name, self.inputBinding)) @classmethod def fromDoc( @@ -9583,9 +9432,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9599,9 +9446,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputRecordSchema'", None, _errors__) _constructed = cls( fields=fields, type=type, @@ -9622,7 +9467,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -9653,7 +9498,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9666,14 +9511,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], 
inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -9688,7 +9531,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10002,9 +9845,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10018,9 +9859,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputEnumSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputEnumSchema'", None, _errors__) _constructed = cls( symbols=symbols, type=type, @@ -10041,7 +9880,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -10072,7 +9911,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10085,14 +9924,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10107,7 +9944,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10125,9 +9962,7 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10237,9 +10072,7 @@ def save( attrs = frozenset(["symbols", "type", "label", "doc", "name", "inputBinding"]) -class CommandInputArraySchema( - InputArraySchema, CommandInputSchema, CommandLineBindable -): +class CommandInputArraySchema(InputArraySchema, CommandInputSchema, CommandLineBindable): def __init__( self, items: Any, @@ -10280,9 +10113,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash( - (self.items, self.type, self.label, self.doc, self.name, self.inputBinding) - ) + return hash((self.items, self.type, self.label, self.doc, self.name, self.inputBinding)) @classmethod def fromDoc( @@ -10412,9 +10243,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10428,9 +10257,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandInputArraySchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandInputArraySchema'", None, _errors__) _constructed = cls( items=items, type=type, @@ -10451,7 +10278,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ 
-10482,7 +10309,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10495,14 +10322,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10517,7 +10342,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10878,9 +10703,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10894,9 +10717,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordField'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputRecordField'", None, _errors__) _constructed = cls( doc=doc, name=name, @@ -10919,7 +10740,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -10950,7 +10771,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10963,14 +10784,12 @@ def save( relative_uris=relative_uris, keys=keys + 
[key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -10985,7 +10804,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11108,9 +10927,7 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11321,9 +11138,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11337,9 +11152,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputRecordSchema'", None, _errors__) _constructed = cls( fields=fields, type=type, @@ -11359,7 +11172,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -11390,7 +11203,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11403,14 +11216,12 @@ def save( 
relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -11425,7 +11236,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11688,9 +11499,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11704,9 +11513,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputEnumSchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputEnumSchema'", None, _errors__) _constructed = cls( symbols=symbols, type=type, @@ -11726,7 +11533,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -11757,7 +11564,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11770,14 +11577,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = 
saved_val @@ -11792,7 +11597,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11810,9 +11615,7 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) + u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -12051,9 +11854,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12067,9 +11868,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputArraySchema'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputArraySchema'", None, _errors__) _constructed = cls( items=items, type=type, @@ -12089,7 +11888,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -12120,7 +11919,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -12133,14 +11932,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = 
saved_val[0] r[key] = saved_val @@ -12155,7 +11952,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -12565,9 +12362,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12607,7 +12402,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -12623,7 +12418,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -12653,7 +12448,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -12666,14 +12461,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -12688,7 +12481,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13166,9 +12959,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = 
expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13182,9 +12973,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'CommandOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'CommandOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -13207,7 +12996,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -13223,7 +13012,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -13253,7 +13042,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13266,14 +13055,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -13288,7 +13075,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13881,9 +13668,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", 
loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13928,7 +13713,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -13944,7 +13729,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -13976,7 +13761,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13989,14 +13774,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14011,7 +13794,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -14155,9 +13938,7 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14610,9 +14391,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, 
vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14646,7 +14425,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -14679,7 +14458,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14692,14 +14471,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -14714,7 +14491,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -14933,16 +14710,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `packages`".format(k), SourceLine(_doc, k, str), ) ) @@ -14964,7 +14737,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -14997,7 +14770,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if 
key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15010,14 +14783,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15032,7 +14803,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -15169,9 +14940,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15202,7 +14971,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -15233,7 +15002,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15246,14 +15015,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15268,7 +15035,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if 
self.package is not None and "package" not in r: r["package"] = save( @@ -15449,9 +15216,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15482,7 +15247,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -15513,7 +15278,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15526,14 +15291,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15548,7 +15311,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -15692,25 +15455,19 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `listing`".format(k), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException( - 
"Trying 'InitialWorkDirRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InitialWorkDirRequirement'", None, _errors__) _constructed = cls( listing=listing, extension_fields=extension_fields, @@ -15725,7 +15482,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -15758,7 +15515,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15771,14 +15528,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15793,7 +15548,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -15897,16 +15652,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `envDef`".format(k), SourceLine(_doc, k, str), ) ) @@ -15928,7 +15679,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys 
= [] @@ -15961,7 +15712,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15974,14 +15725,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -15996,7 +15745,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -16088,9 +15837,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16102,9 +15849,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ShellCommandRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'ShellCommandRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -16118,7 +15863,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -16151,7 +15896,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16164,14 +15909,12 @@ def save( relative_uris=relative_uris, 
keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16186,7 +15929,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -16452,9 +16195,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16490,7 +16231,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -16523,7 +16264,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16536,14 +16277,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16558,7 +16297,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -16786,9 +16525,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, WorkReuse): - return bool( - self.class_ == other.class_ and 
self.enableReuse == other.enableReuse - ) + return bool(self.class_ == other.class_ and self.enableReuse == other.enableReuse) return False def __hash__(self) -> int: @@ -16830,16 +16567,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `enableReuse`".format(k), SourceLine(_doc, k, str), ) ) @@ -16861,7 +16594,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -16894,7 +16627,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16907,14 +16640,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -16929,7 +16660,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.enableReuse is not None and "enableReuse" not in r: r["enableReuse"] = save( @@ -17004,10 +16735,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, NetworkAccess): - return bool( - self.class_ == other.class_ - and self.networkAccess == other.networkAccess - ) + return bool(self.class_ == 
other.class_ and self.networkAccess == other.networkAccess) return False def __hash__(self) -> int: @@ -17049,9 +16777,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17080,7 +16806,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -17113,7 +16839,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17126,14 +16852,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17148,7 +16872,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.networkAccess is not None and "networkAccess" not in r: r["networkAccess"] = save( @@ -17238,10 +16962,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InplaceUpdateRequirement): - return bool( - self.class_ == other.class_ - and self.inplaceUpdate == other.inplaceUpdate - ) + return bool(self.class_ == other.class_ and self.inplaceUpdate == other.inplaceUpdate) return False def __hash__(self) -> int: @@ -17283,9 +17004,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, 
vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17299,9 +17018,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'InplaceUpdateRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'InplaceUpdateRequirement'", None, _errors__) _constructed = cls( inplaceUpdate=inplaceUpdate, extension_fields=extension_fields, @@ -17316,7 +17033,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -17349,7 +17066,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17362,14 +17079,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17384,7 +17099,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.inplaceUpdate is not None and "inplaceUpdate" not in r: r["inplaceUpdate"] = save( @@ -17451,9 +17166,7 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, ToolTimeLimit): - return bool( - self.class_ == other.class_ and self.timelimit == other.timelimit - ) + return bool(self.class_ == other.class_ and self.timelimit == other.timelimit) return False def __hash__(self) -> int: @@ -17495,16 +17208,12 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, 
scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format( - k - ), + "invalid field `{}`, expected one of: `class`, `timelimit`".format(k), SourceLine(_doc, k, str), ) ) @@ -17526,7 +17235,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -17559,7 +17268,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17572,14 +17281,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17594,7 +17301,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.timelimit is not None and "timelimit" not in r: r["timelimit"] = save( @@ -17834,9 +17541,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17850,9 +17555,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ExpressionToolOutputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'ExpressionToolOutputParameter'", None, _errors__) _constructed = cls( 
label=label, secondaryFiles=secondaryFiles, @@ -17874,7 +17577,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -17890,7 +17593,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17920,7 +17623,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17933,14 +17636,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -17955,7 +17656,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18101,9 +17802,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] - ) + attrs = frozenset(["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"]) class WorkflowInputParameter(InputParameter): @@ -18399,9 +18098,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18415,9 
+18112,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'WorkflowInputParameter'", None, _errors__ - ) + raise ValidationException("Trying 'WorkflowInputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -18443,7 +18138,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -18459,7 +18154,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18489,7 +18184,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -18502,14 +18197,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -18524,7 +18217,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19032,9 +18725,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19072,7 +18763,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, 
inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -19088,7 +18779,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -19120,7 +18811,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19133,14 +18824,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19155,7 +18844,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19299,9 +18988,7 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19618,9 +19305,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19634,9 +19319,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'WorkflowOutputParameter'", None, 
_errors__ - ) + raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -19660,7 +19343,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -19676,7 +19359,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -19706,7 +19389,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19719,14 +19402,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -19741,7 +19422,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19858,9 +19539,7 @@ def save( shift=shift, ) if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri( - self.outputSource, str(self.id), False, 1, relative_uris - ) + u = save_relative_uri(self.outputSource, str(self.id), False, 1, relative_uris) r["outputSource"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -20221,9 +19900,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex 
= expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -20260,7 +19937,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -20276,7 +19953,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -20306,7 +19983,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -20319,14 +19996,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -20341,7 +20016,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20604,9 +20279,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -20634,7 +20307,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -20650,7 +20323,7 @@ def 
save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -20680,7 +20353,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -20693,14 +20366,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -20715,7 +20386,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21014,7 +20685,7 @@ def fromDoc( else: hints = None - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + subscope_baseuri = expand_url("run", baseuri, loadingOptions, True) try: run = load_field( _doc.get("run"), @@ -21070,9 +20741,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21111,7 +20780,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -21127,7 +20796,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if 
temp_id in doc: keys.append(temp_id) @@ -21157,7 +20826,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -21170,14 +20839,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -21192,7 +20859,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21360,9 +21027,7 @@ def save( shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri( - self.scatterMethod, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.scatterMethod, str(self.id), False, None, relative_uris) r["scatterMethod"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21700,9 +21365,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21740,7 +21403,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -21756,7 +21419,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split('#')) > 1: + if len(temp_id.split("#")) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: 
keys.append(temp_id) @@ -21788,7 +21451,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -21801,14 +21464,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -21823,7 +21484,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21967,9 +21628,7 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) + u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -22081,9 +21740,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22095,9 +21752,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'SubworkflowFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'SubworkflowFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22111,7 +21766,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, 
int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -22144,7 +21799,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -22157,14 +21812,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -22179,7 +21832,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -22245,9 +21898,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22259,9 +21910,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'ScatterFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'ScatterFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22275,7 +21924,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -22308,7 +21957,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -22321,14 
+21970,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -22343,7 +21990,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -22409,9 +22056,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22423,9 +22068,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'MultipleInputFeatureRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'MultipleInputFeatureRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22439,7 +22082,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -22472,7 +22115,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -22485,14 +22128,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = 
saved_val @@ -22507,7 +22148,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -22573,9 +22214,7 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22587,9 +22226,7 @@ def fromDoc( break if _errors__: - raise ValidationException( - "Trying 'StepInputExpressionRequirement'", None, _errors__ - ) + raise ValidationException("Trying 'StepInputExpressionRequirement'", None, _errors__) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22603,7 +22240,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 + shift: int = 0, ) -> CommentedMap: if keys is None: keys = [] @@ -22636,7 +22273,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 'class': + if key != "class": line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -22649,14 +22286,12 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): + if len(saved_val) == 1: saved_val = saved_val[0] r[key] = saved_val @@ -22671,7 +22306,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift + shift=shift, ) # top refers to the directory level @@ -23306,16 +22941,14 @@ def save( array_of_union_of_FileLoader_or_DirectoryLoader, ) ) 
-secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = ( + _SecondaryDSLLoader(union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader) ) uri_union_of_None_type_or_strtype_True_False_None = _URILoader( union_of_None_type_or_strtype, True, False, None ) Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader( - Directory_classLoader, False, True, None -) +uri_Directory_classLoader_False_True_None = _URILoader(Directory_classLoader, False, True, None) union_of_None_type_or_booltype = _UnionLoader( ( None_type, @@ -23329,11 +22962,13 @@ def save( ) ) array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) -union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( - ( - None_type, - SecondaryFileSchemaLoader, - array_of_SecondaryFileSchemaLoader, +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = ( + _UnionLoader( + ( + None_type, + SecondaryFileSchemaLoader, + array_of_SecondaryFileSchemaLoader, + ) ) ) secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( @@ -23347,11 +22982,13 @@ def save( ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = ( + _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, + ) ) union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( ( @@ 
-23449,13 +23086,15 @@ def save( WorkflowInputParameterLoader, ) ) -array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( - _ArrayLoader(union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader) +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _ArrayLoader( + union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader ) -idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _IdMapLoader( - array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, - "id", - "type", +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( + _IdMapLoader( + array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, + "id", + "type", + ) ) union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _UnionLoader( ( @@ -23659,10 +23298,8 @@ def save( array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" - ) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" ) union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -23703,10 +23340,8 @@ def save( array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" - ) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" ) 
union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -23758,13 +23393,15 @@ def save( CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader( + union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) ) ) array_of_inttype = _ArrayLoader(inttype) @@ -23774,15 +23411,11 @@ def save( array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader( - ("DockerRequirement",), "DockerRequirement_class" -) +DockerRequirement_classLoader = _EnumLoader(("DockerRequirement",), "DockerRequirement_class") uri_DockerRequirement_classLoader_False_True_None = _URILoader( DockerRequirement_classLoader, False, True, None ) -SoftwareRequirement_classLoader = _EnumLoader( - ("SoftwareRequirement",), "SoftwareRequirement_class" -) +SoftwareRequirement_classLoader = _EnumLoader(("SoftwareRequirement",), "SoftwareRequirement_class") uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( SoftwareRequirement_classLoader, False, True, None ) @@ -23818,9 +23451,7 @@ def save( ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader( - ("EnvVarRequirement",), 
"EnvVarRequirement_class" -) +EnvVarRequirement_classLoader = _EnumLoader(("EnvVarRequirement",), "EnvVarRequirement_class") uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( EnvVarRequirement_classLoader, False, True, None ) @@ -23834,16 +23465,12 @@ def save( uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( ShellCommandRequirement_classLoader, False, True, None ) -ResourceRequirement_classLoader = _EnumLoader( - ("ResourceRequirement",), "ResourceRequirement_class" -) +ResourceRequirement_classLoader = _EnumLoader(("ResourceRequirement",), "ResourceRequirement_class") uri_ResourceRequirement_classLoader_False_True_None = _URILoader( ResourceRequirement_classLoader, False, True, None ) WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") -uri_WorkReuse_classLoader_False_True_None = _URILoader( - WorkReuse_classLoader, False, True, None -) +uri_WorkReuse_classLoader_False_True_None = _URILoader(WorkReuse_classLoader, False, True, None) union_of_booltype_or_ExpressionLoader = _UnionLoader( ( booltype, @@ -23884,9 +23511,7 @@ def save( idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( array_of_WorkflowInputParameterLoader, "id", "type" ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( - ExpressionToolOutputParameterLoader -) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) @@ -23918,13 +23543,11 @@ def save( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( - _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, - ) 
+uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, ) array_of_Any_type = _ArrayLoader(Any_type) union_of_None_type_or_array_of_Any_type = _UnionLoader( @@ -23936,14 +23559,12 @@ def save( idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, ) ) uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( @@ -23965,9 +23586,7 @@ def save( union_of_None_type_or_ScatterMethodLoader, False, True, None ) Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader( - Workflow_classLoader, False, True, None -) +uri_Workflow_classLoader_False_True_None = _URILoader(Workflow_classLoader, False, True, None) array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" @@ -24008,10 +23627,8 @@ def save( WorkflowLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader - ) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader ) 
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( ( From 3d61e55b43a4582b7b1d2c63b5e32add6787eb55 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Thu, 8 Jun 2023 10:51:54 -0600 Subject: [PATCH 39/44] updating Makefile to properly exclude cwl files --- Makefile | 6 +- schema_salad/metaschema.py | 250 ++++-- schema_salad/tests/cwl_v1_0.py | 1141 +++++++++++++++++---------- schema_salad/tests/cwl_v1_1.py | 1349 ++++++++++++++++++++------------ schema_salad/tests/cwl_v1_2.py | 2 + 5 files changed, 1759 insertions(+), 989 deletions(-) diff --git a/Makefile b/Makefile index b5c60480b..3f894a158 100644 --- a/Makefile +++ b/Makefile @@ -47,7 +47,7 @@ help: Makefile ## cleanup : shortcut for "make sort_imports format flake8 diff_pydocstyle_report" cleanup: sort_imports format flake8 diff_pydocstyle_report -## install-dep : install most of the development dependencies via pip +## install-dep : inshttps://github.com/common-workflow-language/cwltool/issues?q=is%3Aissue+is%3Aopen+author%3Atom-tantall most of the development dependencies via pip install-dep: install-dependencies install-dependencies: FORCE @@ -106,10 +106,10 @@ codespell: ## format : check/fix all code indentation and formatting (runs black) format: - black --exclude metaschema.py --exclude _version.py --exclude tests/cwl_v1_0.py --exclude tests/cwl_v1_1.py --exclude tests/cwl_v1_2.py schema_salad setup.py mypy-stubs + black --force-exclude "metaschema.py|schema_salad/tests/cwl_v1*"" --exclude _version.py schema_salad setup.py mypy-stubs format-check: - black --diff --check --exclude metaschema.py --exclude _version.py --exclude tests/cwl_v1_0.py --exclude tests/cwl_v1_1.py --exclude tests/cwl_v1_2.py schema_salad setup.py mypy-stubs + black --diff --check --force-exclude "metaschema.py|schema_salad/tests/cwl_v1*" --exclude _version.py setup.py mypy-stubs ## pylint : run static code 
analysis on Python code pylint: $(PYSOURCES) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 8e57efa31..81040a14a 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -353,6 +353,7 @@ def add_kv( @no_type_check def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + """Take a list of keys/indexes and iterates through the global CommentedMap.""" doc = doc_line_info for key in keys: if isinstance(doc, CommentedMap): @@ -405,6 +406,7 @@ def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + """Given a array of line column information, get the minimum column.""" min_col = 0 for line in line_numbers: if line_numbers[line]["col"] > min_col: @@ -1137,7 +1139,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.doc == other.doc and self.name == other.name and self.type == other.type + self.doc == other.doc + and self.name == other.name + and self.type == other.type ) return False @@ -1221,12 +1225,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1251,7 +1259,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1282,7 +1290,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': 
line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1295,12 +1303,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1315,7 +1325,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1464,12 +1474,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format(k), + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1492,7 +1506,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1523,7 +1537,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1536,12 +1550,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + 
): saved_val = saved_val[0] r[key] = saved_val @@ -1556,7 +1572,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1725,7 +1741,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -1757,7 +1775,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1788,7 +1806,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1801,12 +1819,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1821,7 +1841,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1839,7 +1859,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -1961,12 
+1983,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format(k), + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1989,7 +2015,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2020,7 +2046,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2033,12 +2059,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2053,7 +2081,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) @@ -2396,7 +2424,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2435,7 +2465,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: 
Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2466,7 +2496,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2479,12 +2509,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2499,7 +2531,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) @@ -2830,7 +2862,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2860,7 +2894,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2891,7 +2925,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2904,12 +2938,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if 
type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2924,10 +2960,12 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.specializeFrom is not None and "specializeFrom" not in r: - u = save_relative_uri(self.specializeFrom, base_url, False, 1, relative_uris) + u = save_relative_uri( + self.specializeFrom, base_url, False, 1, relative_uris + ) r["specializeFrom"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3027,7 +3065,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type, self.jsonldPredicate, self.default)) + return hash( + (self.doc, self.name, self.type, self.jsonldPredicate, self.default) + ) @classmethod def fromDoc( @@ -3142,7 +3182,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3176,7 +3218,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3207,7 +3249,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3220,12 +3262,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] 
r[key] = saved_val @@ -3240,7 +3284,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3690,7 +3734,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3732,7 +3778,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3763,7 +3809,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3776,12 +3822,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3796,7 +3844,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3898,7 +3946,9 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris + ) r["docParent"] = u max_len, inserted_line_info = 
add_kv( old_doc=doc, @@ -3913,7 +3963,9 @@ def save( shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -3928,7 +3980,9 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4364,7 +4418,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4404,7 +4460,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4435,7 +4491,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4448,12 +4504,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4468,7 +4526,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name 
is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4507,7 +4565,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4564,7 +4624,9 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris + ) r["docParent"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4579,7 +4641,9 @@ def save( shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4594,7 +4658,9 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4902,7 +4968,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4938,7 +5006,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = 
[] @@ -4969,7 +5037,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4982,12 +5050,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5002,7 +5072,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5062,7 +5132,9 @@ def save( shift=shift, ) if self.docParent is not None and "docParent" not in r: - u = save_relative_uri(self.docParent, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris + ) r["docParent"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5077,7 +5149,9 @@ def save( shift=shift, ) if self.docChild is not None and "docChild" not in r: - u = save_relative_uri(self.docChild, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5092,7 +5166,9 @@ def save( shift=shift, ) if self.docAfter is not None and "docAfter" not in r: - u = save_relative_uri(self.docAfter, str(self.name), False, None, relative_uris) + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5136,7 +5212,9 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = 
frozenset(["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"]) + attrs = frozenset( + ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"] + ) _vocab = { @@ -5368,15 +5446,17 @@ def save( ) Documentation_nameLoader = _EnumLoader(("documentation",), "Documentation_name") typedsl_Documentation_nameLoader_2 = _TypeDSLLoader(Documentation_nameLoader, 2) -union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader( - ( - SaladRecordSchemaLoader, - SaladEnumSchemaLoader, - DocumentationLoader, +union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = ( + _UnionLoader( + ( + SaladRecordSchemaLoader, + SaladEnumSchemaLoader, + DocumentationLoader, + ) ) ) -array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = ( - _ArrayLoader(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader) +array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _ArrayLoader( + union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader ) union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_0.py b/schema_salad/tests/cwl_v1_0.py index 647680aa5..c0d9ac0c1 100644 --- a/schema_salad/tests/cwl_v1_0.py +++ b/schema_salad/tests/cwl_v1_0.py @@ -353,6 +353,7 @@ def add_kv( @no_type_check def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + """Take a list of keys/indexes and iterates through the global CommentedMap.""" doc = doc_line_info for key in keys: if isinstance(doc, CommentedMap): @@ -405,6 +406,7 @@ def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + """Given a array of line column information, 
get the minimum column.""" min_col = 0 for line in line_numbers: if line_numbers[line]["col"] > min_col: @@ -1133,7 +1135,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.name == other.name and self.doc == other.doc and self.type == other.type + self.name == other.name + and self.doc == other.doc + and self.type == other.type ) return False @@ -1217,12 +1221,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`".format(k), + "invalid field `{}`, expected one of: `name`, `doc`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1247,7 +1255,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1278,7 +1286,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1291,12 +1299,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1311,7 +1321,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, 
relative_uris) @@ -1460,12 +1470,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format(k), + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1488,7 +1502,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1519,7 +1533,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1532,12 +1546,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1552,7 +1568,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1688,12 +1704,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format(k), + "invalid field `{}`, expected one 
of: `symbols`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1716,7 +1736,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1747,7 +1767,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1760,12 +1780,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1780,7 +1802,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1905,12 +1927,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format(k), + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1933,7 +1959,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1964,7 +1990,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) 
is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1977,12 +2003,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1997,7 +2025,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -2417,7 +2445,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2456,7 +2486,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2489,7 +2519,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2502,12 +2532,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2522,7 +2554,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if 
self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2848,7 +2880,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.class_, self.location, self.path, self.basename, self.listing)) + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) @classmethod def fromDoc( @@ -2943,7 +2977,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2975,7 +3011,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3008,7 +3044,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3021,12 +3057,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3041,7 +3079,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3309,7 +3347,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, 
vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3343,7 +3383,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3374,7 +3414,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3387,12 +3427,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3407,7 +3449,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3652,7 +3694,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3685,7 +3729,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3716,7 +3760,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while 
line in inserted_line_info: @@ -3729,12 +3773,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3749,7 +3795,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3991,7 +4037,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4025,7 +4073,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4056,7 +4104,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4069,12 +4117,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4089,7 +4139,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, 
base_url, True, None, relative_uris) @@ -4107,7 +4157,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4316,7 +4368,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4348,7 +4402,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4379,7 +4433,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4392,12 +4446,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4412,7 +4468,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -4642,7 +4698,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = 
_doc[k] else: _errors__.append( @@ -4675,7 +4733,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4706,7 +4764,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4719,12 +4777,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4739,7 +4799,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4933,7 +4993,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4964,7 +5026,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4995,7 +5057,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5008,12 +5070,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5028,7 +5092,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -5225,7 +5289,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5257,7 +5323,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -5288,7 +5354,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5301,12 +5367,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5321,7 +5389,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -5533,7 +5601,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = 
expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5565,7 +5635,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -5596,7 +5666,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5609,12 +5679,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5629,7 +5701,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -5979,7 +6051,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6017,7 +6091,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6033,7 +6107,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ 
-6063,7 +6137,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6076,12 +6150,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6096,7 +6172,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6507,7 +6583,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6543,7 +6621,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6559,7 +6637,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -6589,7 +6667,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6602,12 +6680,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # 
If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6622,7 +6702,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -6835,7 +6915,10 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InlineJavascriptRequirement): - return bool(self.class_ == other.class_ and self.expressionLib == other.expressionLib) + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) return False def __hash__(self) -> int: @@ -6880,7 +6963,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6894,7 +6979,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'InlineJavascriptRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) _constructed = cls( expressionLib=expressionLib, extension_fields=extension_fields, @@ -6909,7 +6996,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6942,7 +7029,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6955,12 +7042,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6975,7 +7064,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -7084,12 +7173,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format(k), + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -7111,7 +7204,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7144,7 +7237,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7157,12 +7250,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7177,7 +7272,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.types is 
not None and "types" not in r: r["types"] = save( @@ -7241,7 +7336,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, EnvironmentDef): - return bool(self.envName == other.envName and self.envValue == other.envValue) + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) return False def __hash__(self) -> int: @@ -7294,12 +7391,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format(k), + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -7322,7 +7423,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7353,7 +7454,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7366,12 +7467,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7386,7 +7489,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7680,7 +7783,9 @@ def fromDoc( for k in _doc.keys(): if k not in 
cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -7715,7 +7820,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7746,7 +7851,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7759,12 +7864,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7779,7 +7886,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8069,7 +8176,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8100,7 +8209,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8131,7 +8240,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = 
doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8144,12 +8253,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8164,7 +8275,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.glob is not None and "glob" not in r: r["glob"] = save( @@ -8394,7 +8505,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8408,7 +8521,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputRecordField'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputRecordField'", None, _errors__ + ) _constructed = cls( name=name, doc=doc, @@ -8428,7 +8543,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8459,7 +8574,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8472,12 +8587,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) 
== 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8492,7 +8609,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -8737,7 +8854,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8751,7 +8870,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputRecordSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, _errors__ + ) _constructed = cls( fields=fields, type=type, @@ -8770,7 +8891,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8801,7 +8922,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8814,12 +8935,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8834,7 +8957,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9076,7 +9199,9 @@ def 
fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9090,7 +9215,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputEnumSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ + ) _constructed = cls( symbols=symbols, type=type, @@ -9110,7 +9237,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9141,7 +9268,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9154,12 +9281,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9174,7 +9303,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9192,7 +9321,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9401,7 +9532,9 @@ def 
fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9415,7 +9548,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputArraySchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputArraySchema'", None, _errors__ + ) _constructed = cls( items=items, type=type, @@ -9433,7 +9568,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9464,7 +9599,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9477,12 +9612,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9497,7 +9634,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -9727,7 +9864,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9741,7 +9880,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 
'CommandOutputRecordField'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) _constructed = cls( name=name, doc=doc, @@ -9760,7 +9901,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9791,7 +9932,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9804,12 +9945,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9824,7 +9967,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10048,7 +10191,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10062,7 +10207,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputRecordSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputRecordSchema'", None, _errors__ + ) _constructed = cls( fields=fields, type=type, @@ -10081,7 +10228,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - 
shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10112,7 +10259,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10125,12 +10272,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10145,7 +10294,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10357,7 +10506,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10371,7 +10522,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputEnumSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputEnumSchema'", None, _errors__ + ) _constructed = cls( symbols=symbols, type=type, @@ -10389,7 +10542,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10420,7 +10573,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in 
inserted_line_info: @@ -10433,12 +10586,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10453,7 +10608,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -10665,7 +10820,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10679,7 +10836,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputArraySchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) _constructed = cls( items=items, type=type, @@ -10697,7 +10856,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10728,7 +10887,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10741,12 +10900,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + 
len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10761,7 +10922,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -11115,7 +11276,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11153,7 +11316,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11169,7 +11332,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -11199,7 +11362,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11212,12 +11375,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11232,7 +11397,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -11669,7 +11834,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = 
expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11683,7 +11850,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -11706,7 +11875,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11722,7 +11891,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -11752,7 +11921,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11765,12 +11934,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11785,7 +11956,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12378,7 +12549,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", 
loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12423,7 +12596,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -12439,7 +12612,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -12471,7 +12644,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -12484,12 +12657,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -12504,7 +12679,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12648,7 +12823,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -13083,7 +13260,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13117,7 +13296,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13150,7 +13329,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13163,12 +13342,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13183,7 +13364,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -13402,12 +13583,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format(k), + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -13429,7 +13614,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13462,7 +13647,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - 
if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13475,12 +13660,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13495,7 +13682,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -13632,7 +13819,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13663,7 +13852,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13694,7 +13883,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13707,12 +13896,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13727,7 +13918,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) 
if self.package is not None and "package" not in r: r["package"] = save( @@ -13908,7 +14099,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13939,7 +14132,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13970,7 +14163,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13983,12 +14176,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14003,7 +14198,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -14147,19 +14342,25 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format(k), + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), SourceLine(_doc, k, str), ) ) break if _errors__: - raise 
ValidationException("Trying 'InitialWorkDirRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) _constructed = cls( listing=listing, extension_fields=extension_fields, @@ -14174,7 +14375,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14207,7 +14408,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14220,12 +14421,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14240,7 +14443,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -14344,12 +14547,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format(k), + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -14371,7 +14578,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> 
CommentedMap: if keys is None: keys = [] @@ -14404,7 +14611,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14417,12 +14624,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14437,7 +14646,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -14529,7 +14738,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14541,7 +14752,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ShellCommandRequirement'", None, _errors__) + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -14555,7 +14768,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14588,7 +14801,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14601,12 +14814,14 @@ def save( 
relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14621,7 +14836,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -14887,7 +15102,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14923,7 +15140,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14956,7 +15173,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14969,12 +15186,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14989,7 +15208,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -15413,7 +15632,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", 
loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15427,7 +15648,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ExpressionToolOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'ExpressionToolOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -15450,7 +15673,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15466,7 +15689,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -15496,7 +15719,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15509,12 +15732,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15529,7 +15754,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -15965,7 +16190,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", 
loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16003,7 +16230,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16019,7 +16246,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -16051,7 +16278,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16064,12 +16291,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16084,7 +16313,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16228,7 +16457,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -16569,7 +16800,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16583,7 +16816,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -16608,7 +16843,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16624,7 +16859,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -16654,7 +16889,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16667,12 +16902,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16687,7 +16924,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -16825,7 +17062,9 @@ def save( shift=shift, ) if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri(self.outputSource, str(self.id), False, 1, relative_uris) + u = save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) r["outputSource"] = u max_len, inserted_line_info 
= add_kv( old_doc=doc, @@ -16991,7 +17230,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.source, self.linkMerge, self.id, self.default, self.valueFrom)) + return hash( + (self.source, self.linkMerge, self.id, self.default, self.valueFrom) + ) @classmethod def fromDoc( @@ -17109,7 +17350,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17143,7 +17386,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17159,7 +17402,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17189,7 +17432,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17202,12 +17445,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17222,7 +17467,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17407,7 +17652,9 @@ def fromDoc( for k in _doc.keys(): if 
k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17435,7 +17682,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17451,7 +17698,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17481,7 +17728,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17494,12 +17741,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17514,7 +17763,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17867,7 +18116,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17906,7 +18157,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] 
= None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17922,7 +18173,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17952,7 +18203,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17965,12 +18216,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17985,7 +18238,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18153,7 +18406,9 @@ def save( shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri(self.scatterMethod, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) r["scatterMethod"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18491,7 +18746,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18529,7 +18786,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 
0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -18545,7 +18802,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18577,7 +18834,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -18590,12 +18847,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -18610,7 +18869,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18754,7 +19013,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -18866,7 +19127,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18878,7 +19141,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'SubworkflowFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 
'SubworkflowFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -18892,7 +19157,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -18925,7 +19190,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -18938,12 +19203,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -18958,7 +19225,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -19024,7 +19291,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19036,7 +19305,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ScatterFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19050,7 +19321,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is 
None: keys = [] @@ -19083,7 +19354,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19096,12 +19367,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19116,7 +19389,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -19182,7 +19455,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19194,7 +19469,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'MultipleInputFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19208,7 +19485,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -19241,7 +19518,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19254,12 +19531,14 @@ def save( relative_uris=relative_uris, keys=keys + 
[key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19274,7 +19553,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -19340,7 +19619,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19352,7 +19633,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'StepInputExpressionRequirement'", None, _errors__) + raise ValidationException( + "Trying 'StepInputExpressionRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -19366,7 +19649,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -19399,7 +19682,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19412,12 +19695,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19432,7 +19717,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -19961,14 +20246,18 @@ def save( union_of_None_type_or_strtype, True, False, None ) Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader(Directory_classLoader, False, True, None) +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) union_of_strtype_or_ExpressionLoader = _UnionLoader( ( strtype, ExpressionLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader(union_of_strtype_or_ExpressionLoader) +array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader( + union_of_strtype_or_ExpressionLoader +) union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader = _UnionLoader( ( None_type, @@ -20082,13 +20371,11 @@ def save( ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = ( - _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, - ) +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, ) union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( ( @@ -20219,17 +20506,17 @@ def save( uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( SchemaDefRequirement_classLoader, False, True, None ) -union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _UnionLoader( - ( - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, +union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( + _UnionLoader( + ( + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + ) ) ) 
-array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( - _ArrayLoader( - union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader - ) +array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _ArrayLoader( + union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader ) union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( ( @@ -20272,8 +20559,10 @@ def save( array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) ) union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -20308,8 +20597,10 @@ def save( array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) ) union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -20362,15 +20653,13 @@ def save( CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader( - 
union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - ) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, ) ) array_of_inttype = _ArrayLoader(inttype) @@ -20380,11 +20669,15 @@ def save( array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader(("DockerRequirement",), "DockerRequirement_class") +DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) uri_DockerRequirement_classLoader_False_True_None = _URILoader( DockerRequirement_classLoader, False, True, None ) -SoftwareRequirement_classLoader = _EnumLoader(("SoftwareRequirement",), "SoftwareRequirement_class") +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( SoftwareRequirement_classLoader, False, True, None ) @@ -20401,21 +20694,17 @@ def save( uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( InitialWorkDirRequirement_classLoader, False, True, None ) -union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = ( - _UnionLoader( - ( - FileLoader, - DirectoryLoader, - DirentLoader, - strtype, - ExpressionLoader, - ) +union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + FileLoader, + DirectoryLoader, + DirentLoader, + strtype, + ExpressionLoader, ) ) 
-array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = ( - _ArrayLoader( - union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader - ) +array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader ) union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader = _UnionLoader( ( @@ -20424,7 +20713,9 @@ def save( ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader(("EnvVarRequirement",), "EnvVarRequirement_class") +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( EnvVarRequirement_classLoader, False, True, None ) @@ -20438,7 +20729,9 @@ def save( uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( ShellCommandRequirement_classLoader, False, True, None ) -ResourceRequirement_classLoader = _EnumLoader(("ResourceRequirement",), "ResourceRequirement_class") +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) uri_ResourceRequirement_classLoader_False_True_None = _URILoader( ResourceRequirement_classLoader, False, True, None ) @@ -20469,7 +20762,9 @@ def save( uri_ExpressionTool_classLoader_False_True_None = _URILoader( ExpressionTool_classLoader, False, True, None ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) @@ -20501,11 +20796,13 @@ def save( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader 
= _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) ) array_of_Any_type = _ArrayLoader(Any_type) union_of_None_type_or_array_of_Any_type = _UnionLoader( @@ -20517,12 +20814,14 @@ def save( idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) ) ) uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( @@ -20544,7 +20843,9 @@ def save( union_of_None_type_or_ScatterMethodLoader, False, True, None ) Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader(Workflow_classLoader, False, True, None) +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, True, None +) array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" @@ -20585,8 +20886,10 @@ def save( WorkflowLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader( - 
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader + ) ) union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_1.py b/schema_salad/tests/cwl_v1_1.py index 84b0ef910..816dd0e97 100644 --- a/schema_salad/tests/cwl_v1_1.py +++ b/schema_salad/tests/cwl_v1_1.py @@ -353,6 +353,7 @@ def add_kv( @no_type_check def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + """Take a list of keys/indexes and iterates through the global CommentedMap.""" doc = doc_line_info for key in keys: if isinstance(doc, CommentedMap): @@ -405,6 +406,7 @@ def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + """Given a array of line column information, get the minimum column.""" min_col = 0 for line in line_numbers: if line_numbers[line]["col"] > min_col: @@ -1137,7 +1139,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( - self.doc == other.doc and self.name == other.name and self.type == other.type + self.doc == other.doc + and self.name == other.name + and self.type == other.type ) return False @@ -1221,12 +1225,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), 
SourceLine(_doc, k, str), ) ) @@ -1251,7 +1259,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1282,7 +1290,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1295,12 +1303,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1315,7 +1325,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -1464,12 +1474,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format(k), + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1492,7 +1506,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1523,7 +1537,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 
'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1536,12 +1550,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -1556,7 +1572,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.fields is not None and "fields" not in r: r["fields"] = save( @@ -1692,12 +1708,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format(k), + "invalid field `{}`, expected one of: `symbols`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1720,7 +1740,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1751,7 +1771,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1764,12 +1784,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] 
= saved_val @@ -1784,7 +1806,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.symbols is not None and "symbols" not in r: u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) @@ -1909,12 +1931,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format(k), + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -1937,7 +1963,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -1968,7 +1994,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -1981,12 +2007,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2001,7 +2029,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.items is not None and "items" not in r: r["items"] = save( @@ -2421,7 +2449,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = 
expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2460,7 +2490,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -2493,7 +2523,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -2506,12 +2536,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -2526,7 +2558,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -2852,7 +2884,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.class_, self.location, self.path, self.basename, self.listing)) + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) @classmethod def fromDoc( @@ -2947,7 +2981,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -2979,7 +3015,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int 
= 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3012,7 +3048,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3025,12 +3061,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3045,7 +3083,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.location is not None and "location" not in r: u = save_relative_uri(self.location, base_url, False, None, relative_uris) @@ -3225,12 +3263,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`".format(k), + "invalid field `{}`, expected one of: `loadContents`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -3252,7 +3294,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3283,7 +3325,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3296,12 +3338,14 @@ def save( relative_uris=relative_uris, keys=keys + 
[key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3316,7 +3360,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -3612,7 +3656,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -3650,7 +3696,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -3681,7 +3727,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -3694,12 +3740,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -3714,7 +3762,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -3837,7 +3885,9 @@ def save( shift=shift, ) if self.format is not None and "format" not in 
r: - u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4070,7 +4120,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4104,7 +4156,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4135,7 +4187,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4148,12 +4200,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4168,7 +4222,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4431,7 +4485,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4465,7 +4521,7 @@ def save( relative_uris: bool = True, 
keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4496,7 +4552,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4509,12 +4565,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4529,7 +4587,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -4547,7 +4605,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -4786,7 +4846,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -4820,7 +4882,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -4851,7 +4913,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, 
key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -4864,12 +4926,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -4884,7 +4948,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5202,7 +5266,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5238,7 +5304,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -5269,7 +5335,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5282,12 +5348,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -5302,7 +5370,7 @@ def save( min_col=min_col, max_len=max_len, 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5425,7 +5493,9 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -5448,7 +5518,9 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"]) + attrs = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] + ) class OutputRecordSchema(RecordSchema, OutputSchema): @@ -5604,7 +5676,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5638,7 +5712,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -5669,7 +5743,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -5682,12 +5756,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ 
-5702,7 +5778,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -5965,7 +6041,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -5999,7 +6077,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6030,7 +6108,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6043,12 +6121,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6063,7 +6143,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6081,7 +6161,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -6320,7 +6402,9 
@@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6354,7 +6438,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6385,7 +6469,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6398,12 +6482,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6418,7 +6504,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -6593,7 +6679,10 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InlineJavascriptRequirement): - return bool(self.class_ == other.class_ and self.expressionLib == other.expressionLib) + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) return False def __hash__(self) -> int: @@ -6638,7 +6727,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) 
extension_fields[ex] = _doc[k] else: _errors__.append( @@ -6652,7 +6743,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'InlineJavascriptRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) _constructed = cls( expressionLib=expressionLib, extension_fields=extension_fields, @@ -6667,7 +6760,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6700,7 +6793,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6713,12 +6806,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6733,7 +6828,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.expressionLib is not None and "expressionLib" not in r: r["expressionLib"] = save( @@ -6846,12 +6941,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format(k), + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -6873,7 +6972,7 @@ def save( relative_uris: bool = True, 
keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -6906,7 +7005,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -6919,12 +7018,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -6939,7 +7040,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.types is not None and "types" not in r: r["types"] = save( @@ -6996,7 +7097,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, SecondaryFileSchema): - return bool(self.pattern == other.pattern and self.required == other.required) + return bool( + self.pattern == other.pattern and self.required == other.required + ) return False def __hash__(self) -> int: @@ -7052,12 +7155,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `pattern`, `required`".format(k), + "invalid field `{}`, expected one of: `pattern`, `required`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -7080,7 +7187,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 
0 ) -> CommentedMap: if keys is None: keys = [] @@ -7111,7 +7218,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7124,12 +7231,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7144,7 +7253,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.pattern is not None and "pattern" not in r: r["pattern"] = save( @@ -7227,7 +7336,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, LoadListingRequirement): - return bool(self.class_ == other.class_ and self.loadListing == other.loadListing) + return bool( + self.class_ == other.class_ and self.loadListing == other.loadListing + ) return False def __hash__(self) -> int: @@ -7272,19 +7383,25 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `loadListing`".format(k), + "invalid field `{}`, expected one of: `class`, `loadListing`".format( + k + ), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException("Trying 'LoadListingRequirement'", None, _errors__) + raise ValidationException( + "Trying 'LoadListingRequirement'", None, _errors__ + ) _constructed = cls( loadListing=loadListing, extension_fields=extension_fields, @@ -7299,7 
+7416,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7332,7 +7449,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7345,12 +7462,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7365,7 +7484,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadListing is not None and "loadListing" not in r: r["loadListing"] = save( @@ -7429,7 +7548,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, EnvironmentDef): - return bool(self.envName == other.envName and self.envValue == other.envValue) + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) return False def __hash__(self) -> int: @@ -7482,12 +7603,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format(k), + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -7510,7 +7635,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: 
Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7541,7 +7666,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7554,12 +7679,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7574,7 +7701,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.envName is not None and "envName" not in r: r["envName"] = save( @@ -7868,7 +7995,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -7903,7 +8032,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -7934,7 +8063,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -7947,12 +8076,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: 
+ if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -7967,7 +8098,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8278,7 +8409,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8310,7 +8443,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8341,7 +8474,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8354,12 +8487,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8374,7 +8509,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.loadContents is not None and "loadContents" not in r: r["loadContents"] = save( @@ -8533,12 +8668,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( 
- "invalid field `{}`, expected one of: `inputBinding`".format(k), + "invalid field `{}`, expected one of: `inputBinding`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -8560,7 +8699,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -8591,7 +8730,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -8604,12 +8743,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -8624,7 +8765,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.inputBinding is not None and "inputBinding" not in r: r["inputBinding"] = save( @@ -8930,7 +9071,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -8944,7 +9087,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputRecordField'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputRecordField'", None, _errors__ + ) _constructed = cls( doc=doc, name=name, @@ -8969,7 +9114,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> 
CommentedMap: if keys is None: keys = [] @@ -9000,7 +9145,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9013,12 +9158,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9033,7 +9180,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9156,7 +9303,9 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -9258,7 +9407,9 @@ def save( ) -class CommandInputRecordSchema(InputRecordSchema, CommandInputSchema, CommandLineBindable): +class CommandInputRecordSchema( + InputRecordSchema, CommandInputSchema, CommandLineBindable +): def __init__( self, type: Any, @@ -9299,7 +9450,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.doc, self.name, self.inputBinding)) + return hash( + (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) + ) @classmethod def fromDoc( @@ -9432,7 +9585,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, 
"", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9446,7 +9601,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputRecordSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, _errors__ + ) _constructed = cls( fields=fields, type=type, @@ -9467,7 +9624,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9498,7 +9655,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9511,12 +9668,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9531,7 +9690,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9845,7 +10004,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -9859,7 +10020,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputEnumSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ 
+ ) _constructed = cls( symbols=symbols, type=type, @@ -9880,7 +10043,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -9911,7 +10074,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -9924,12 +10087,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -9944,7 +10109,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -9962,7 +10127,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -10072,7 +10239,9 @@ def save( attrs = frozenset(["symbols", "type", "label", "doc", "name", "inputBinding"]) -class CommandInputArraySchema(InputArraySchema, CommandInputSchema, CommandLineBindable): +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable +): def __init__( self, items: Any, @@ -10113,7 +10282,9 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.doc, self.name, 
self.inputBinding)) + return hash( + (self.items, self.type, self.label, self.doc, self.name, self.inputBinding) + ) @classmethod def fromDoc( @@ -10243,7 +10414,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10257,7 +10430,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandInputArraySchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandInputArraySchema'", None, _errors__ + ) _constructed = cls( items=items, type=type, @@ -10278,7 +10453,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10309,7 +10484,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10322,12 +10497,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10342,7 +10519,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10703,7 +10880,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = 
expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -10717,7 +10896,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputRecordField'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) _constructed = cls( doc=doc, name=name, @@ -10740,7 +10921,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -10771,7 +10952,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -10784,12 +10965,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -10804,7 +10987,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -10927,7 +11110,9 @@ def save( shift=shift, ) if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) r["format"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11138,7 +11323,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + 
ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11152,7 +11339,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputRecordSchema'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputRecordSchema'", None, _errors__ + ) _constructed = cls( fields=fields, type=type, @@ -11172,7 +11361,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11203,7 +11392,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11216,12 +11405,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11236,7 +11427,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11499,7 +11690,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11513,7 +11706,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputEnumSchema'", None, _errors__) + raise ValidationException( + "Trying 
'CommandOutputEnumSchema'", None, _errors__ + ) _constructed = cls( symbols=symbols, type=type, @@ -11533,7 +11728,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11564,7 +11759,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11577,12 +11772,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11597,7 +11794,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -11615,7 +11812,9 @@ def save( shift=shift, ) if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, str(self.name), True, None, relative_uris) + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -11854,7 +12053,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -11868,7 +12069,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputArraySchema'", None, _errors__) + raise 
ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) _constructed = cls( items=items, type=type, @@ -11888,7 +12091,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -11919,7 +12122,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -11932,12 +12135,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -11952,7 +12157,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) @@ -12362,7 +12567,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12402,7 +12609,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -12418,7 +12625,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -12448,7 +12655,7 @@ def 
save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -12461,12 +12668,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -12481,7 +12690,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -12959,7 +13168,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -12973,7 +13184,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'CommandOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -12996,7 +13209,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13012,7 +13225,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -13042,7 +13255,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 
"class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13055,12 +13268,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13075,7 +13290,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13668,7 +13883,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -13713,7 +13930,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -13729,7 +13946,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -13761,7 +13978,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -13774,12 +13991,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if 
len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -13794,7 +14013,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -13938,7 +14157,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -14391,7 +14612,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14425,7 +14648,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14458,7 +14681,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14471,12 +14694,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14491,7 +14716,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if 
self.dockerPull is not None and "dockerPull" not in r: r["dockerPull"] = save( @@ -14710,12 +14935,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format(k), + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -14737,7 +14966,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -14770,7 +14999,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -14783,12 +15012,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -14803,7 +15034,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.packages is not None and "packages" not in r: r["packages"] = save( @@ -14940,7 +15171,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -14971,7 +15204,7 @@ def save( 
relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15002,7 +15235,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15015,12 +15248,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15035,7 +15270,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.package is not None and "package" not in r: r["package"] = save( @@ -15216,7 +15451,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15247,7 +15484,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15278,7 +15515,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15291,12 +15528,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list 
of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15311,7 +15550,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.entryname is not None and "entryname" not in r: r["entryname"] = save( @@ -15455,19 +15694,25 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format(k), + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), SourceLine(_doc, k, str), ) ) break if _errors__: - raise ValidationException("Trying 'InitialWorkDirRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) _constructed = cls( listing=listing, extension_fields=extension_fields, @@ -15482,7 +15727,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15515,7 +15760,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15528,12 +15773,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = 
saved_val @@ -15548,7 +15795,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.listing is not None and "listing" not in r: r["listing"] = save( @@ -15652,12 +15899,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format(k), + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -15679,7 +15930,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15712,7 +15963,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15725,12 +15976,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15745,7 +15998,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.envDef is not None and "envDef" not in r: r["envDef"] = save( @@ -15837,7 +16090,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, 
scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -15849,7 +16104,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ShellCommandRequirement'", None, _errors__) + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -15863,7 +16120,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -15896,7 +16153,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -15909,12 +16166,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -15929,7 +16188,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -16195,7 +16454,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16231,7 +16492,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16264,7 +16525,7 @@ def save( if 
isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16277,12 +16538,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16297,7 +16560,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.coresMin is not None and "coresMin" not in r: r["coresMin"] = save( @@ -16525,7 +16788,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, WorkReuse): - return bool(self.class_ == other.class_ and self.enableReuse == other.enableReuse) + return bool( + self.class_ == other.class_ and self.enableReuse == other.enableReuse + ) return False def __hash__(self) -> int: @@ -16567,12 +16832,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format(k), + "invalid field `{}`, expected one of: `class`, `enableReuse`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -16594,7 +16863,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16627,7 +16896,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != 
"class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16640,12 +16909,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16660,7 +16931,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.enableReuse is not None and "enableReuse" not in r: r["enableReuse"] = save( @@ -16735,7 +17006,10 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, NetworkAccess): - return bool(self.class_ == other.class_ and self.networkAccess == other.networkAccess) + return bool( + self.class_ == other.class_ + and self.networkAccess == other.networkAccess + ) return False def __hash__(self) -> int: @@ -16777,7 +17051,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -16806,7 +17082,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -16839,7 +17115,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -16852,12 +17128,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the 
returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -16872,7 +17150,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.networkAccess is not None and "networkAccess" not in r: r["networkAccess"] = save( @@ -16962,7 +17240,10 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, InplaceUpdateRequirement): - return bool(self.class_ == other.class_ and self.inplaceUpdate == other.inplaceUpdate) + return bool( + self.class_ == other.class_ + and self.inplaceUpdate == other.inplaceUpdate + ) return False def __hash__(self) -> int: @@ -17004,7 +17285,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17018,7 +17301,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'InplaceUpdateRequirement'", None, _errors__) + raise ValidationException( + "Trying 'InplaceUpdateRequirement'", None, _errors__ + ) _constructed = cls( inplaceUpdate=inplaceUpdate, extension_fields=extension_fields, @@ -17033,7 +17318,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17066,7 +17351,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17079,12 +17364,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], 
inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17099,7 +17386,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.inplaceUpdate is not None and "inplaceUpdate" not in r: r["inplaceUpdate"] = save( @@ -17166,7 +17453,9 @@ def __init__( def __eq__(self, other: Any) -> bool: if isinstance(other, ToolTimeLimit): - return bool(self.class_ == other.class_ and self.timelimit == other.timelimit) + return bool( + self.class_ == other.class_ and self.timelimit == other.timelimit + ) return False def __hash__(self) -> int: @@ -17208,12 +17497,16 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format(k), + "invalid field `{}`, expected one of: `class`, `timelimit`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -17235,7 +17528,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17268,7 +17561,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17281,12 +17574,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned 
value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17301,7 +17596,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.timelimit is not None and "timelimit" not in r: r["timelimit"] = save( @@ -17541,7 +17836,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -17555,7 +17852,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ExpressionToolOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'ExpressionToolOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -17577,7 +17876,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -17593,7 +17892,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -17623,7 +17922,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -17636,12 +17935,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if 
( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -17656,7 +17957,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -17802,7 +18103,9 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"]) + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) class WorkflowInputParameter(InputParameter): @@ -18098,7 +18401,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18112,7 +18417,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'WorkflowInputParameter'", None, _errors__) + raise ValidationException( + "Trying 'WorkflowInputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -18138,7 +18445,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -18154,7 +18461,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18184,7 +18491,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -18197,12 +18504,14 @@ def save( 
relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -18217,7 +18526,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18725,7 +19034,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -18763,7 +19074,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -18779,7 +19090,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -18811,7 +19122,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -18824,12 +19135,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -18844,7 +19157,7 @@ def save( min_col=min_col, 
max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -18988,7 +19301,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19305,7 +19620,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19319,7 +19636,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'WorkflowOutputParameter'", None, _errors__) + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) _constructed = cls( label=label, secondaryFiles=secondaryFiles, @@ -19343,7 +19662,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -19359,7 +19678,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -19389,7 +19708,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19402,12 +19721,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the 
returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -19422,7 +19743,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -19539,7 +19860,9 @@ def save( shift=shift, ) if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri(self.outputSource, str(self.id), False, 1, relative_uris) + u = save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) r["outputSource"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -19900,7 +20223,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -19937,7 +20262,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -19953,7 +20278,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -19983,7 +20308,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -19996,12 +20321,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 
1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -20016,7 +20343,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -20279,7 +20606,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -20307,7 +20636,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -20323,7 +20652,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -20353,7 +20682,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -20366,12 +20695,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -20386,7 +20717,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, 
True, None, relative_uris) @@ -20685,7 +21016,7 @@ def fromDoc( else: hints = None - subscope_baseuri = expand_url("run", baseuri, loadingOptions, True) + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) try: run = load_field( _doc.get("run"), @@ -20741,7 +21072,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -20780,7 +21113,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -20796,7 +21129,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -20826,7 +21159,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -20839,12 +21172,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -20859,7 +21194,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21027,7 +21362,9 @@ def save( shift=shift, ) if self.scatterMethod is not None and "scatterMethod" not in r: - u = 
save_relative_uri(self.scatterMethod, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) r["scatterMethod"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21365,7 +21702,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21403,7 +21742,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -21419,7 +21758,7 @@ def save( if doc: if self.id: temp_id = self.id - if len(temp_id.split("#")) > 1: + if len(temp_id.split('#')) > 1: temp_id = self.id.split("#")[1] if temp_id in doc: keys.append(temp_id) @@ -21451,7 +21790,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -21464,12 +21803,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -21484,7 +21825,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) if self.id is not None and "id" not in r: u = save_relative_uri(self.id, base_url, True, None, relative_uris) @@ -21628,7 +21969,9 @@ def save( shift=shift, ) if self.cwlVersion is not None and "cwlVersion" not in r: - u = 
save_relative_uri(self.cwlVersion, str(self.id), False, None, relative_uris) + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) r["cwlVersion"] = u max_len, inserted_line_info = add_kv( old_doc=doc, @@ -21740,7 +22083,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21752,7 +22097,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'SubworkflowFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'SubworkflowFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -21766,7 +22113,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -21799,7 +22146,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -21812,12 +22159,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -21832,7 +22181,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -21898,7 +22247,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = 
expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -21910,7 +22261,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'ScatterFeatureRequirement'", None, _errors__) + raise ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -21924,7 +22277,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -21957,7 +22310,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -21970,12 +22323,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -21990,7 +22345,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -22056,7 +22411,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22068,7 +22425,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'MultipleInputFeatureRequirement'", None, _errors__) + 
raise ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22082,7 +22441,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -22115,7 +22474,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -22128,12 +22487,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -22148,7 +22509,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -22214,7 +22575,9 @@ def fromDoc( for k in _doc.keys(): if k not in cls.attrs: if ":" in k: - ex = expand_url(k, "", loadingOptions, scoped_id=False, vocab_term=False) + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( @@ -22226,7 +22589,9 @@ def fromDoc( break if _errors__: - raise ValidationException("Trying 'StepInputExpressionRequirement'", None, _errors__) + raise ValidationException( + "Trying 'StepInputExpressionRequirement'", None, _errors__ + ) _constructed = cls( extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -22240,7 +22605,7 @@ def save( relative_uris: bool = True, keys: Optional[List[Any]] = None, inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, + 
shift: int = 0 ) -> CommentedMap: if keys is None: keys = [] @@ -22273,7 +22638,7 @@ def save( if isinstance(key, str): if hasattr(self, key): if getattr(self, key) is not None: - if key != "class": + if key != 'class': line = doc.lc.data[key][0] + shift if inserted_line_info: while line in inserted_line_info: @@ -22286,12 +22651,14 @@ def save( relative_uris=relative_uris, keys=keys + [key], inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # If the returned value is a list of size 1, just save the value in the list if type(saved_val) == list: - if len(saved_val) == 1: + if ( + len(saved_val) == 1 + ): saved_val = saved_val[0] r[key] = saved_val @@ -22306,7 +22673,7 @@ def save( min_col=min_col, max_len=max_len, inserted_line_info=inserted_line_info, - shift=shift, + shift=shift ) # top refers to the directory level @@ -22941,14 +23308,16 @@ def save( array_of_union_of_FileLoader_or_DirectoryLoader, ) ) -secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = ( - _SecondaryDSLLoader(union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader) +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader ) uri_union_of_None_type_or_strtype_True_False_None = _URILoader( union_of_None_type_or_strtype, True, False, None ) Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader(Directory_classLoader, False, True, None) +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) union_of_None_type_or_booltype = _UnionLoader( ( None_type, @@ -22962,13 +23331,11 @@ def save( ) ) array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) -union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = ( - _UnionLoader( - ( - None_type, - 
SecondaryFileSchemaLoader, - array_of_SecondaryFileSchemaLoader, - ) +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( + ( + None_type, + SecondaryFileSchemaLoader, + array_of_SecondaryFileSchemaLoader, ) ) secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( @@ -22982,13 +23349,11 @@ def save( ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = ( - _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, - ) +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, ) union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( ( @@ -23086,15 +23451,13 @@ def save( WorkflowInputParameterLoader, ) ) -array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _ArrayLoader( - union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( + _ArrayLoader(union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader) ) -idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( - _IdMapLoader( - array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, - "id", - "type", - ) +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _IdMapLoader( + array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, + "id", + "type", ) union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _UnionLoader( ( @@ -23298,8 +23661,10 @@ def save( array_of_CommandInputRecordFieldLoader, ) ) 
-idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) ) union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -23340,8 +23705,10 @@ def save( array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) ) union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( ( @@ -23393,15 +23760,13 @@ def save( CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _ArrayLoader( - union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - ) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + 
array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, ) ) array_of_inttype = _ArrayLoader(inttype) @@ -23411,11 +23776,15 @@ def save( array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader(("DockerRequirement",), "DockerRequirement_class") +DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) uri_DockerRequirement_classLoader_False_True_None = _URILoader( DockerRequirement_classLoader, False, True, None ) -SoftwareRequirement_classLoader = _EnumLoader(("SoftwareRequirement",), "SoftwareRequirement_class") +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( SoftwareRequirement_classLoader, False, True, None ) @@ -23451,7 +23820,9 @@ def save( ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader(("EnvVarRequirement",), "EnvVarRequirement_class") +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( EnvVarRequirement_classLoader, False, True, None ) @@ -23465,12 +23836,16 @@ def save( uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( ShellCommandRequirement_classLoader, False, True, None ) -ResourceRequirement_classLoader = _EnumLoader(("ResourceRequirement",), "ResourceRequirement_class") +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) uri_ResourceRequirement_classLoader_False_True_None = _URILoader( ResourceRequirement_classLoader, False, True, None ) WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") -uri_WorkReuse_classLoader_False_True_None = _URILoader(WorkReuse_classLoader, False, True, None) +uri_WorkReuse_classLoader_False_True_None = _URILoader( + WorkReuse_classLoader, False, True, None +) union_of_booltype_or_ExpressionLoader = 
_UnionLoader( ( booltype, @@ -23511,7 +23886,9 @@ def save( idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( array_of_WorkflowInputParameterLoader, "id", "type" ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader(ExpressionToolOutputParameterLoader) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) @@ -23543,11 +23920,13 @@ def save( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) ) array_of_Any_type = _ArrayLoader(Any_type) union_of_None_type_or_array_of_Any_type = _UnionLoader( @@ -23559,12 +23938,14 @@ def save( idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) ) ) uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( @@ -23586,7 +23967,9 @@ def save( union_of_None_type_or_ScatterMethodLoader, False, True, None ) Workflow_classLoader = _EnumLoader(("Workflow",), 
"Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader(Workflow_classLoader, False, True, None) +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, True, None +) array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" @@ -23627,8 +24010,10 @@ def save( WorkflowLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( + _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader + ) ) union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( ( diff --git a/schema_salad/tests/cwl_v1_2.py b/schema_salad/tests/cwl_v1_2.py index 331864dde..e53ecd07c 100644 --- a/schema_salad/tests/cwl_v1_2.py +++ b/schema_salad/tests/cwl_v1_2.py @@ -353,6 +353,7 @@ def add_kv( @no_type_check def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + """Take a list of keys/indexes and iterates through the global CommentedMap.""" doc = doc_line_info for key in keys: if isinstance(doc, CommentedMap): @@ -405,6 +406,7 @@ def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + """Given a array of line column information, get the minimum column.""" min_col = 0 for line in line_numbers: if line_numbers[line]["col"] > min_col: From 63da12182ecfe3c8d8e2f6da0741c7ea9ca2da48 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Thu, 8 Jun 2023 11:15:37 -0600 Subject: [PATCH 40/44] Trying to pass metaschema up to date test --- 
schema_salad/python_codegen_support.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index 9233f3d17..802880db3 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -350,6 +350,7 @@ def add_kv( @no_type_check def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + """Take a list of keys/indexes and iterates through the global CommentedMap.""" doc = doc_line_info for key in keys: if isinstance(doc, CommentedMap): @@ -402,6 +403,7 @@ def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + """Given a array of line column information, get the minimum column.""" min_col = 0 for line in line_numbers: if line_numbers[line]["col"] > min_col: From ba8be89e4e86d3cc17576bc5d2c797828db0a080 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Fri, 9 Jun 2023 15:59:16 -0600 Subject: [PATCH 41/44] trying alternate style of loading test files in --- schema_salad/tests/test_line_numbers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index ea090d2f9..76b8596db 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -127,8 +127,9 @@ def load_document_by_uri(path: str) -> Any: baseuri = str(real_path) loadingOptions = cwl_v1_2.LoadingOptions(fileuri=baseuri) - - doc = loadingOptions.fetcher.fetch_text(real_path) + # doc = loadingOptions.fetcher.fetch_text(real_path) + with open(path, 'r') as file: + doc = file.read() yaml = yaml_no_ts() doc = yaml.load(doc) From be53207e622621913584b665f2ff6992484a90e3 Mon Sep 17 00:00:00 2001 From: acoleman2000 Date: Thu, 15 Jun 2023 09:42:10 -0600 Subject: [PATCH 42/44] Bogus commit to re-run testS From 93406ddb3ae0e18b286758c7ae906c234a543ffc Mon Sep 17 00:00:00 
2001 From: Alex Coleman Date: Tue, 14 Nov 2023 12:07:00 -0700 Subject: [PATCH 43/44] Updating line numbers tests to use generated cwl files. --- schema_salad/metaschema.py | 3 - schema_salad/python_codegen_support.py | 3 - schema_salad/tests/test_line_numbers.py | 85 +++++++++++++------ .../tests/test_outputs_before_inputs.cwl | 2 +- .../tests/test_secondary_files_dsl.cwl | 2 +- schema_salad/tests/test_type_dsl.cwl | 2 +- 6 files changed, 62 insertions(+), 35 deletions(-) diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index 6847e8a2d..c659889b5 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -1136,13 +1136,10 @@ def _document_load( doc.pop("$schemas") if "$base" in doc: doc.pop("$base") -<<<<<<< HEAD if isinstance(doc, CommentedMap): global doc_line_info doc_line_info = doc -======= ->>>>>>> main if "$graph" in doc: loadingOptions.idx[baseuri] = ( diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index b31c71c65..9c60c5a75 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -1133,13 +1133,10 @@ def _document_load( doc.pop("$schemas") if "$base" in doc: doc.pop("$base") -<<<<<<< HEAD if isinstance(doc, CommentedMap): global doc_line_info doc_line_info = doc -======= ->>>>>>> main if "$graph" in doc: loadingOptions.idx[baseuri] = ( diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py index 76b8596db..283acac84 100644 --- a/schema_salad/tests/test_line_numbers.py +++ b/schema_salad/tests/test_line_numbers.py @@ -1,23 +1,25 @@ -# from parser import load_document_by_uri, save +import importlib from pathlib import Path -from typing import Any, MutableSequence, Optional, cast -from urllib.parse import unquote_plus, urlparse +from typing import Any, Dict, List, MutableSequence, Optional, Union, cast +from urllib.parse import urlparse from ruamel.yaml.comments import CommentedMap -import 
schema_salad.tests.cwl_v1_2 as cwl_v1_2 from schema_salad.utils import yaml_no_ts +from schema_salad import codegen +from schema_salad.avro.schema import Names +from schema_salad.exceptions import ValidationException +from schema_salad.schema import load_schema -from .util import get_data +from .util import get_data, cwl_file_uri - -def test_secondary_files_dsl() -> None: +def test_secondary_files_dsl(tmp_path: Path) -> None: """ Checks object is properly saving when dsl is used """ t = "test_secondary_files_dsl.cwl" path = get_data("tests/" + t) - obj = load_document_by_uri(str(path)) + obj = load_document_by_uri(tmp_path, str(path)) saved_obj = obj.save() assert isinstance(saved_obj, CommentedMap) assert saved_obj.lc.data == { @@ -49,13 +51,13 @@ def test_secondary_files_dsl() -> None: } -def test_outputs_before_inputs() -> None: +def test_outputs_before_inputs(tmp_path: Path) -> None: """ Tests when output comes in cwl file before inputs """ t = "test_outputs_before_inputs.cwl" path = get_data("tests/" + t) - obj = load_document_by_uri(str(path)) + obj = load_document_by_uri(tmp_path, str(path)) saved_obj = obj.save() assert isinstance(saved_obj, CommentedMap) assert { @@ -80,7 +82,7 @@ def test_outputs_before_inputs() -> None: } -def test_type_dsl() -> None: +def test_type_dsl(tmp_path: Path) -> None: """ Checks object is properly saving when type DSL is used. In this example, type for the input is File? which should expand to @@ -88,7 +90,7 @@ def test_type_dsl() -> None: """ t = "test_type_dsl.cwl" path = get_data("tests/" + t) - obj = load_document_by_uri(str(path)) + obj = load_document_by_uri(tmp_path, str(path)) saved_obj = obj.save() assert isinstance(saved_obj, CommentedMap) assert { @@ -114,28 +116,37 @@ def test_type_dsl() -> None: assert saved_obj["outputs"][0]["outputBinding"].lc.data == {"glob": [15, 6, 15, 12]} -def load_document_by_uri(path: str) -> Any: - """ - Takes in a path and loads it via the python codegen. 
- """ - uri = urlparse(path) - if not uri.scheme or uri.scheme == "file": - real_path = Path(unquote_plus(uri.path)).resolve().as_uri() +def load_document_by_uri(tmp_path: Path, path: Union[str, Path]) -> Any: + src_target = tmp_path / "cwl_v1_0.py" + python_codegen(cwl_file_uri, src_target) + spec = importlib.util.spec_from_file_location("cwl_v1_0", src_target) + assert isinstance(spec, importlib.machinery.ModuleSpec) + assert isinstance(spec.loader, importlib.abc.Loader) + temp_cwl_v1_0 = importlib.util.module_from_spec(spec) + spec.loader.exec_module(temp_cwl_v1_0) + cwl_v1_0: Any = temp_cwl_v1_0 + + if isinstance(path, str): + uri = urlparse(path) + if not uri.scheme or uri.scheme == "file": + real_path = Path(uri.path).resolve().as_uri() + else: + real_path = path else: - real_path = path + real_path = path.resolve().as_uri() baseuri = str(real_path) - loadingOptions = cwl_v1_2.LoadingOptions(fileuri=baseuri) - # doc = loadingOptions.fetcher.fetch_text(real_path) - with open(path, 'r') as file: - doc = file.read() + loadingOptions = cwl_v1_0.LoadingOptions(fileuri=baseuri) + with open(path, "r") as file: + doc = file.read() + # doc = loadingOptions.fetcher.fetch_text(urllib.parse.unquote(str(real_path))) yaml = yaml_no_ts() doc = yaml.load(doc) - result = cwl_v1_2.load_document_by_yaml( - doc, baseuri, cast(Optional[cwl_v1_2.LoadingOptions], loadingOptions) + result = cwl_v1_0.load_document_by_yaml( + doc, baseuri, cast(Optional[cwl_v1_0.LoadingOptions], loadingOptions) ) if isinstance(result, MutableSequence): @@ -144,3 +155,25 @@ def load_document_by_uri(path: str) -> Any: lst.append(r) return lst return result + + + +def python_codegen( + file_uri: str, + target: Path, + parser_info: Optional[str] = None, + package: Optional[str] = None, +) -> None: + document_loader, avsc_names, schema_metadata, metaschema_loader = load_schema(file_uri) + assert isinstance(avsc_names, Names) + schema_raw_doc = metaschema_loader.fetch(file_uri) + schema_doc, schema_metadata 
= metaschema_loader.resolve_all(schema_raw_doc, file_uri) + codegen.codegen( + "python", + cast(List[Dict[str, Any]], schema_doc), + schema_metadata, + document_loader, + target=str(target), + parser_info=parser_info, + package=package, + ) diff --git a/schema_salad/tests/test_outputs_before_inputs.cwl b/schema_salad/tests/test_outputs_before_inputs.cwl index e1594c790..063e29119 100644 --- a/schema_salad/tests/test_outputs_before_inputs.cwl +++ b/schema_salad/tests/test_outputs_before_inputs.cwl @@ -1,5 +1,5 @@ class: CommandLineTool -cwlVersion: v1.2 +cwlVersion: v1.0 baseCommand: python3 outputs: diff --git a/schema_salad/tests/test_secondary_files_dsl.cwl b/schema_salad/tests/test_secondary_files_dsl.cwl index 9b08bc9e9..61d37ad0f 100644 --- a/schema_salad/tests/test_secondary_files_dsl.cwl +++ b/schema_salad/tests/test_secondary_files_dsl.cwl @@ -1,5 +1,5 @@ class: CommandLineTool -cwlVersion: v1.2 +cwlVersion: v1.0 baseCommand: python3 inputs: diff --git a/schema_salad/tests/test_type_dsl.cwl b/schema_salad/tests/test_type_dsl.cwl index 5b822d812..8735505b0 100644 --- a/schema_salad/tests/test_type_dsl.cwl +++ b/schema_salad/tests/test_type_dsl.cwl @@ -1,5 +1,5 @@ class: CommandLineTool -cwlVersion: v1.2 +cwlVersion: v1.0 baseCommand: python3 inputs: From 154af867d7351193255719adec294364f1852aac Mon Sep 17 00:00:00 2001 From: Alex Coleman Date: Tue, 14 Nov 2023 12:07:54 -0700 Subject: [PATCH 44/44] Removing static cwl files. 
--- schema_salad/tests/cwl_v1_0.py | 20982 ------------------------ schema_salad/tests/cwl_v1_1.py | 24106 ---------------------------- schema_salad/tests/cwl_v1_2.py | 26303 ------------------------------- 3 files changed, 71391 deletions(-) delete mode 100644 schema_salad/tests/cwl_v1_0.py delete mode 100644 schema_salad/tests/cwl_v1_1.py delete mode 100644 schema_salad/tests/cwl_v1_2.py diff --git a/schema_salad/tests/cwl_v1_0.py b/schema_salad/tests/cwl_v1_0.py deleted file mode 100644 index c0d9ac0c1..000000000 --- a/schema_salad/tests/cwl_v1_0.py +++ /dev/null @@ -1,20982 +0,0 @@ -# -# This file was autogenerated using schema-salad-tool --codegen=python -# The code itself is released under the Apache 2.0 license and the help text is -# subject to the license of the original schema. -import copy -import logging -import os -import pathlib -import re -import tempfile -import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 -import xml.sax # nosec -from abc import ABC, abstractmethod -from io import StringIO -from typing import ( - Any, - Dict, - List, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, - no_type_check, -) -from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit -from urllib.request import pathname2url - -from rdflib import Graph -from rdflib.plugins.parsers.notation3 import BadSyntax -from ruamel.yaml.comments import CommentedMap, CommentedSeq - -from schema_salad.exceptions import SchemaSaladException, ValidationException -from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher -from schema_salad.sourceline import SourceLine, add_lc_filename -from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ - -_vocab: Dict[str, str] = {} -_rvocab: Dict[str, str] = {} - -_logger = logging.getLogger("salad") - - -IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] - -doc_line_info = CommentedMap() - - -class LoadingOptions: - 
idx: IdxType - fileuri: Optional[str] - baseuri: str - namespaces: MutableMapping[str, str] - schemas: MutableSequence[str] - original_doc: Optional[Any] - addl_metadata: MutableMapping[str, Any] - fetcher: Fetcher - vocab: Dict[str, str] - rvocab: Dict[str, str] - cache: CacheType - imports: List[str] - includes: List[str] - - def __init__( - self, - fetcher: Optional[Fetcher] = None, - namespaces: Optional[Dict[str, str]] = None, - schemas: Optional[List[str]] = None, - fileuri: Optional[str] = None, - copyfrom: Optional["LoadingOptions"] = None, - original_doc: Optional[Any] = None, - addl_metadata: Optional[Dict[str, str]] = None, - baseuri: Optional[str] = None, - idx: Optional[IdxType] = None, - imports: Optional[List[str]] = None, - includes: Optional[List[str]] = None, - ) -> None: - """Create a LoadingOptions object.""" - self.original_doc = original_doc - - if idx is not None: - self.idx = idx - else: - self.idx = copyfrom.idx if copyfrom is not None else {} - - if fileuri is not None: - self.fileuri = fileuri - else: - self.fileuri = copyfrom.fileuri if copyfrom is not None else None - - if baseuri is not None: - self.baseuri = baseuri - else: - self.baseuri = copyfrom.baseuri if copyfrom is not None else "" - - if namespaces is not None: - self.namespaces = namespaces - else: - self.namespaces = copyfrom.namespaces if copyfrom is not None else {} - - if schemas is not None: - self.schemas = schemas - else: - self.schemas = copyfrom.schemas if copyfrom is not None else [] - - if addl_metadata is not None: - self.addl_metadata = addl_metadata - else: - self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} - - if imports is not None: - self.imports = imports - else: - self.imports = copyfrom.imports if copyfrom is not None else [] - - if includes is not None: - self.includes = includes - else: - self.includes = copyfrom.includes if copyfrom is not None else [] - - if fetcher is not None: - self.fetcher = fetcher - elif copyfrom is not 
None: - self.fetcher = copyfrom.fetcher - else: - import requests - from cachecontrol.caches import FileCache - from cachecontrol.wrapper import CacheControl - - root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) - session = CacheControl( - requests.Session(), - cache=FileCache(root / ".cache" / "salad"), - ) - self.fetcher: Fetcher = DefaultFetcher({}, session) - - self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - - self.vocab = _vocab - self.rvocab = _rvocab - - if namespaces is not None: - self.vocab = self.vocab.copy() - self.rvocab = self.rvocab.copy() - for k, v in namespaces.items(): - self.vocab[k] = v - self.rvocab[v] = k - - @property - def graph(self) -> Graph: - """Generate a merged rdflib.Graph from all entries in self.schemas.""" - graph = Graph() - if not self.schemas: - return graph - key = str(hash(tuple(self.schemas))) - if key in self.cache: - return cast(Graph, self.cache[key]) - for schema in self.schemas: - fetchurl = ( - self.fetcher.urljoin(self.fileuri, schema) - if self.fileuri is not None - else pathlib.Path(schema).resolve().as_uri() - ) - if fetchurl not in self.cache or self.cache[fetchurl] is True: - _logger.debug("Getting external schema %s", fetchurl) - try: - content = self.fetcher.fetch_text(fetchurl) - except Exception as e: - _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) - continue - newGraph = Graph() - err_msg = "unknown error" - for fmt in ["xml", "turtle"]: - try: - newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) - self.cache[fetchurl] = newGraph - graph += newGraph - break - except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: - err_msg = str(e) - else: - _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) - self.cache[key] = graph - return graph - - -class Saveable(ABC): - """Mark classes than have a save() and fromDoc() function.""" - - @classmethod - @abstractmethod - def fromDoc( - 
cls, - _doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Saveable": - """Construct this object from the result of yaml.load().""" - - @abstractmethod - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, - ) -> CommentedMap: - """Convert this object to a JSON/YAML friendly dictionary.""" - - -def load_field(val, fieldtype, baseuri, loadingOptions): - # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any - if isinstance(val, MutableMapping): - if "$import" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) - result, metadata = _document_load_by_url( - fieldtype, - url, - loadingOptions, - ) - loadingOptions.imports.append(url) - return result - if "$include" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) - val = loadingOptions.fetcher.fetch_text(url) - loadingOptions.includes.append(url) - return fieldtype.load(val, baseuri, loadingOptions) - - -save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] - - -def add_kv( - old_doc: CommentedMap, - new_doc: CommentedMap, - line_numbers: Dict[Any, Dict[str, int]], - key: str, - val: Any, - max_len: int, - cols: Dict[int, int], - min_col: int = 0, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, -) -> Tuple[int, Optional[Dict[int, int]]]: - """Add key value pair into Commented Map. 
- - Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers - for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, - and max col value taken for each line. - """ - if inserted_line_info is None: - inserted_line_info = {} - - if len(inserted_line_info.keys()) >= 1: - max_line = max(inserted_line_info.keys()) + 1 - else: - max_line = 0 - - if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key - line_info = old_doc.lc.data[key] # Get the line information for the key - if ( - line_info[0] + shift not in inserted_line_info - ): # If the line of the key + shift isn't taken, add it - new_doc.lc.add_kv_line_col( - key, - [ - old_doc.lc.data[key][0] + shift, - old_doc.lc.data[key][1], - old_doc.lc.data[key][2] + shift, - old_doc.lc.data[key][3], - ], - ) - inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] - else: # If the line is already taken - line = line_info[0] + shift - while line in inserted_line_info.keys(): # Find the closest free line - line += 1 - new_doc.lc.add_kv_line_col( - key, - [ - line, - old_doc.lc.data[key][1], - line + (line - old_doc.lc.data[key][2]), - old_doc.lc.data[key][3], - ], - ) - inserted_line_info[line] = old_doc.lc.data[key][1] - return max_len, inserted_line_info - elif isinstance(val, (int, float, str)) and not isinstance( - val, bool - ): # If the value is hashable - if val in line_numbers: # If the value is in the original CommentedMap - line = line_numbers[val]["line"] + shift # Get the line info for the value - if line in inserted_line_info: # Get the appropriate line to place value on - line = max_line - - col = line_numbers[val]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - inserted_line_info[line] = col + len(key) + 2 - return max_len, inserted_line_info - elif isinstance(val, str): # Logic for DSL expansition with "?" - if val + "?" 
in line_numbers: - line = line_numbers[val + "?"]["line"] + shift - if line in inserted_line_info: - line = max_line - col = line_numbers[val + "?"]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - inserted_line_info[line] = col + len(key) + 2 - return max_len, inserted_line_info - elif old_doc: - if val in old_doc: - index = old_doc.lc.data.index(val) - line_info = old_doc.lc.data[index] - if line_info[0] + shift not in inserted_line_info: - new_doc.lc.add_kv_line_col( - key, - [ - old_doc.lc.data[index][0] + shift, - old_doc.lc.data[index][1], - old_doc.lc.data[index][2] + shift, - old_doc.lc.data[index][3], - ], - ) - inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ - 1 - ] - else: - new_doc.lc.add_kv_line_col( - key, - [ - max_line + shift, - old_doc.lc.data[index][1], - max_line + (max_line - old_doc.lc.data[index][2]) + shift, - old_doc.lc.data[index][3], - ], - ) - inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] - # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) - new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) - inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1, inserted_line_info - - -@no_type_check -def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: - """Take a list of keys/indexes and iterates through the global CommentedMap.""" - doc = doc_line_info - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - return None - else: - return None - if isinstance(doc, CommentedSeq): - to_return = CommentedMap() - for index, key in enumerate(doc): - to_return[key] = "" - to_return.lc.add_kv_line_col( - key, - [ - doc.lc.data[index][0], - doc.lc.data[index][1], - doc.lc.data[index][0], - doc.lc.data[index][1], - ], - ) - 
return to_return - return doc - - -def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: - """Get line numbers for kv pairs in CommentedMap. - - For each key/value pair in a CommentedMap, save the line/col info into a dictionary, - only save value info if value is hashable. - """ - line_numbers: Dict[Any, Dict[str, int]] = {} - if doc is None: - return {} - if doc.lc.data is None: - return {} - for key, value in doc.lc.data.items(): - line_numbers[key] = {} - - line_numbers[key]["line"] = doc.lc.data[key][0] - line_numbers[key]["col"] = doc.lc.data[key][1] - if isinstance(value, (int, float, bool, str)): - line_numbers[value] = {} - line_numbers[value]["line"] = doc.lc.data[key][2] - line_numbers[value]["col"] = doc.lc.data[key][3] - return line_numbers - - -def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: - """Given a array of line column information, get the minimum column.""" - min_col = 0 - for line in line_numbers: - if line_numbers[line]["col"] > min_col: - min_col = line_numbers[line]["col"] - return min_col - - -def get_max_line_num(doc: CommentedMap) -> int: - """Get the max line number for a CommentedMap. - - Iterate through the the key with the highest line number until you reach a non-CommentedMap value - or empty CommentedMap. - """ - max_line = 0 - max_key = "" - cur = doc - while isinstance(cur, CommentedMap) and len(cur) > 0: - for key in cur.lc.data.keys(): - if cur.lc.data[key][2] >= max_line: - max_line = cur.lc.data[key][2] - max_key = key - cur = cur[max_key] - return max_line + 1 - - -def save( - val: Any, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, -) -> save_type: - """Save a val of any type. - - Recursively calls save method from class if val is of type Saveable. - Otherwise, saves val to CommentedMap or CommentedSeq. 
- """ - if keys is None: - keys = [] - - doc = iterate_through_doc(keys) - - if isinstance(val, Saveable): - return val.save( - top=top, - base_url=base_url, - relative_uris=relative_uris, - keys=keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if isinstance(val, MutableSequence): - r = CommentedSeq() - r.lc.data = {} - for i in range(0, len(val)): - new_keys = keys - if doc: - if str(i) in doc: - r.lc.data[i] = doc.lc.data[i] - new_keys.append(i) - r.append( - save( - val[i], - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=new_keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - ) - return r - - if isinstance(val, MutableMapping): - newdict = CommentedMap() - new_keys = keys - for key in val: - - if doc: - if key in doc: - newdict.lc.add_kv_line_col(key, doc.lc.data[key]) - new_keys.append(key) - - newdict[key] = save( - val[key], - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=new_keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - return newdict - if val is None or isinstance(val, (int, float, bool, str)): - return val - raise Exception("Not Saveable: %s" % type(val)) - - -def save_with_metadata( - val: Any, - valLoadingOpts: LoadingOptions, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, -) -> save_type: - """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" - saved_val = save(val, top, base_url, relative_uris) - newdict: MutableMapping[str, Any] = {} - if isinstance(saved_val, MutableSequence): - newdict = {"$graph": saved_val} - elif isinstance(saved_val, MutableMapping): - newdict = saved_val - - if valLoadingOpts.namespaces: - newdict["$namespaces"] = valLoadingOpts.namespaces - if valLoadingOpts.schemas: - newdict["$schemas"] = valLoadingOpts.schemas - if valLoadingOpts.baseuri: - newdict["$base"] = valLoadingOpts.baseuri - for k, v in valLoadingOpts.addl_metadata.items(): - if k not in 
newdict: - newdict[k] = v - - return newdict - - -def expand_url( - url, # type: str - base_url, # type: str - loadingOptions, # type: LoadingOptions - scoped_id=False, # type: bool - vocab_term=False, # type: bool - scoped_ref=None, # type: Optional[int] -): - # type: (...) -> str - if url in ("@id", "@type"): - return url - - if vocab_term and url in loadingOptions.vocab: - return url - - if bool(loadingOptions.vocab) and ":" in url: - prefix = url.split(":")[0] - if prefix in loadingOptions.vocab: - url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] - - split = urlsplit(url) - - if ( - (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) - or url.startswith("$(") - or url.startswith("${") - ): - pass - elif scoped_id and not bool(split.fragment): - splitbase = urlsplit(base_url) - frg = "" - if bool(splitbase.fragment): - frg = splitbase.fragment + "/" + split.path - else: - frg = split.path - pt = splitbase.path if splitbase.path != "" else "/" - url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) - elif scoped_ref is not None and not bool(split.fragment): - splitbase = urlsplit(base_url) - sp = splitbase.fragment.split("/") - n = scoped_ref - while n > 0 and len(sp) > 0: - sp.pop() - n -= 1 - sp.append(url) - url = urlunsplit( - ( - splitbase.scheme, - splitbase.netloc, - splitbase.path, - splitbase.query, - "/".join(sp), - ) - ) - else: - url = loadingOptions.fetcher.urljoin(base_url, url) - - if vocab_term: - split = urlsplit(url) - if bool(split.scheme): - if url in loadingOptions.rvocab: - return loadingOptions.rvocab[url] - else: - raise ValidationException(f"Term {url!r} not in vocabulary") - - return url - - -class _Loader: - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - pass - - -class _AnyLoader(_Loader): - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, 
Optional[str]) -> Any - if doc is not None: - return doc - raise ValidationException("Expected non-null") - - -class _PrimitiveLoader(_Loader): - def __init__(self, tp): - # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None - self.tp = tp - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, self.tp): - raise ValidationException( - "Expected a {} but got {}".format( - self.tp.__class__.__name__, doc.__class__.__name__ - ) - ) - return doc - - def __repr__(self): # type: () -> str - return str(self.tp) - - -class _ArrayLoader(_Loader): - def __init__(self, items): - # type: (_Loader) -> None - self.items = items - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, MutableSequence): - raise ValidationException(f"Expected a list, was {type(doc)}") - r = [] # type: List[Any] - errors = [] # type: List[SchemaSaladException] - for i in range(0, len(doc)): - try: - lf = load_field(doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions) - if isinstance(lf, MutableSequence): - r.extend(lf) - else: - r.append(lf) - except ValidationException as e: - errors.append(e.with_sourceline(SourceLine(doc, i, str))) - if errors: - raise ValidationException("", None, errors) - return r - - def __repr__(self): # type: () -> str - return f"array<{self.items}>" - - -class _EnumLoader(_Loader): - def __init__(self, symbols: Sequence[str], name: str) -> None: - self.symbols = symbols - self.name = name - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if doc in self.symbols: - return doc - raise ValidationException(f"Expected one of {self.symbols}") - - def __repr__(self): # type: () -> str - return self.name - - -class _SecondaryDSLLoader(_Loader): - def __init__(self, inner): - # type: (_Loader) -> None - 
self.inner = inner - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - r: List[Dict[str, Any]] = [] - if isinstance(doc, MutableSequence): - for d in doc: - if isinstance(d, str): - if d.endswith("?"): - r.append({"pattern": d[:-1], "required": False}) - else: - r.append({"pattern": d}) - elif isinstance(d, dict): - new_dict: Dict[str, Any] = {} - dict_copy = copy.deepcopy(d) - if "pattern" in dict_copy: - new_dict["pattern"] = dict_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {d}" - ) - new_dict["required"] = ( - dict_copy.pop("required") if "required" in dict_copy else None - ) - - if len(dict_copy): - raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - dict_copy - ) - ) - r.append(new_dict) - - else: - raise ValidationException( - "Expected a string or sequence of (strings or mappings)." - ) - elif isinstance(doc, MutableMapping): - new_dict = {} - doc_copy = copy.deepcopy(doc) - if "pattern" in doc_copy: - new_dict["pattern"] = doc_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {doc}" - ) - new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None - - if len(doc_copy): - raise ValidationException( - f"Unallowed values in secondaryFiles specification entry: {doc_copy}" - ) - r.append(new_dict) - - elif isinstance(doc, str): - if doc.endswith("?"): - r.append({"pattern": doc[:-1], "required": False}) - else: - r.append({"pattern": doc}) - else: - raise ValidationException("Expected str or sequence of str") - return self.inner.load(r, baseuri, loadingOptions, docRoot) - - -class _RecordLoader(_Loader): - def __init__(self, classtype): - # type: (Type[Saveable]) -> None - self.classtype = classtype - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, 
LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, MutableMapping): - raise ValidationException(f"Expected a dict, was {type(doc)}") - return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) - - def __repr__(self): # type: () -> str - return str(self.classtype.__name__) - - -class _ExpressionLoader(_Loader): - def __init__(self, items: Type[str]) -> None: - self.items = items - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, str): - raise ValidationException(f"Expected a str, was {type(doc)}") - return doc - - -class _UnionLoader(_Loader): - def __init__(self, alternates: Sequence[_Loader]) -> None: - self.alternates = alternates - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - errors = [] - for t in self.alternates: - try: - return t.load(doc, baseuri, loadingOptions, docRoot=docRoot) - except ValidationException as e: - errors.append(ValidationException(f"tried {t} but", None, [e])) - raise ValidationException("", None, errors, "-") - - def __repr__(self): # type: () -> str - return " | ".join(str(a) for a in self.alternates) - - -class _URILoader(_Loader): - def __init__(self, inner, scoped_id, vocab_term, scoped_ref): - # type: (_Loader, bool, bool, Union[int, None]) -> None - self.inner = inner - self.scoped_id = scoped_id - self.vocab_term = vocab_term - self.scoped_ref = scoped_ref - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableSequence): - newdoc = [] - for i in doc: - if isinstance(i, str): - newdoc.append( - expand_url( - i, - baseuri, - loadingOptions, - self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - ) - else: - newdoc.append(i) - doc = newdoc - elif isinstance(doc, str): - doc = expand_url( - doc, - baseuri, - loadingOptions, - 
self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - return self.inner.load(doc, baseuri, loadingOptions) - - -class _TypeDSLLoader(_Loader): - typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") - - def __init__(self, inner, refScope): - # type: (_Loader, Union[int, None]) -> None - self.inner = inner - self.refScope = refScope - - def resolve( - self, - doc, # type: str - baseuri, # type: str - loadingOptions, # type: LoadingOptions - ): - # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] - m = self.typeDSLregex.match(doc) - if m: - group1 = m.group(1) - assert group1 is not None # nosec - first = expand_url(group1, baseuri, loadingOptions, False, True, self.refScope) - second = third = None - if bool(m.group(2)): - second = {"type": "array", "items": first} - # second = CommentedMap((("type", "array"), - # ("items", first))) - # second.lc.add_kv_line_col("type", lc) - # second.lc.add_kv_line_col("items", lc) - # second.lc.filename = filename - if bool(m.group(3)): - third = ["null", second or first] - # third = CommentedSeq(["null", second or first]) - # third.lc.add_kv_line_col(0, lc) - # third.lc.add_kv_line_col(1, lc) - # third.lc.filename = filename - return third or second or first - return doc - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableSequence): - r = [] # type: List[Any] - for d in doc: - if isinstance(d, str): - resolved = self.resolve(d, baseuri, loadingOptions) - if isinstance(resolved, MutableSequence): - for i in resolved: - if i not in r: - r.append(i) - else: - if resolved not in r: - r.append(resolved) - else: - r.append(d) - doc = r - elif isinstance(doc, str): - doc = self.resolve(doc, baseuri, loadingOptions) - - return self.inner.load(doc, baseuri, loadingOptions) - - -class _IdMapLoader(_Loader): - def __init__(self, inner, mapSubject, mapPredicate): - # type: (_Loader, str, Union[str, None]) -> None 
- self.inner = inner - self.mapSubject = mapSubject - self.mapPredicate = mapPredicate - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableMapping): - r = [] # type: List[Any] - for k in sorted(doc.keys()): - val = doc[k] - if isinstance(val, CommentedMap): - v = copy.copy(val) - v.lc.data = val.lc.data - v.lc.filename = val.lc.filename - v[self.mapSubject] = k - r.append(v) - elif isinstance(val, MutableMapping): - v2 = copy.copy(val) - v2[self.mapSubject] = k - r.append(v2) - else: - if self.mapPredicate: - v3 = {self.mapPredicate: val} - v3[self.mapSubject] = k - r.append(v3) - else: - raise ValidationException("No mapPredicate") - doc = r - return self.inner.load(doc, baseuri, loadingOptions) - - -def _document_load( - loader: _Loader, - doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], - baseuri: str, - loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Tuple[Any, LoadingOptions]: - if isinstance(doc, str): - return _document_load_by_url( - loader, - loadingOptions.fetcher.urljoin(baseuri, doc), - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - if isinstance(doc, MutableMapping): - addl_metadata = {} - if addl_metadata_fields is not None: - for mf in addl_metadata_fields: - if mf in doc: - addl_metadata[mf] = doc[mf] - - docuri = baseuri - if "$base" in doc: - baseuri = doc["$base"] - - loadingOptions = LoadingOptions( - copyfrom=loadingOptions, - namespaces=doc.get("$namespaces", None), - schemas=doc.get("$schemas", None), - baseuri=doc.get("$base", None), - addl_metadata=addl_metadata, - ) - - doc = copy.copy(doc) - if "$namespaces" in doc: - doc.pop("$namespaces") - if "$schemas" in doc: - doc.pop("$schemas") - if "$base" in doc: - doc.pop("$base") - - if isinstance(doc, CommentedMap): - global doc_line_info - doc_line_info = doc - - if "$graph" in doc: - 
loadingOptions.idx[baseuri] = ( - loader.load(doc["$graph"], baseuri, loadingOptions), - loadingOptions, - ) - else: - loadingOptions.idx[baseuri] = ( - loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), - loadingOptions, - ) - - if docuri != baseuri: - loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] - - return loadingOptions.idx[baseuri] - if isinstance(doc, MutableSequence): - loadingOptions.idx[baseuri] = ( - loader.load(doc, baseuri, loadingOptions), - loadingOptions, - ) - return loadingOptions.idx[baseuri] - - raise ValidationException( - "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) - ) - - -def _document_load_by_url( - loader: _Loader, - url: str, - loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Tuple[Any, LoadingOptions]: - if url in loadingOptions.idx: - return loadingOptions.idx[url] - - doc_url, frg = urldefrag(url) - - text = loadingOptions.fetcher.fetch_text(doc_url) - textIO = StringIO(text) - textIO.name = str(doc_url) - yaml = yaml_no_ts() - result = yaml.load(textIO) - add_lc_filename(result, doc_url) - - loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) - - _document_load( - loader, - result, - doc_url, - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - return loadingOptions.idx[url] - - -def file_uri(path, split_frag=False): # type: (str, bool) -> str - if path.startswith("file://"): - return path - if split_frag: - pathsp = path.split("#", 2) - frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" - urlpath = pathname2url(str(pathsp[0])) - else: - urlpath = pathname2url(path) - frag = "" - if urlpath.startswith("//"): - return f"file:{urlpath}{frag}" - return f"file://{urlpath}{frag}" - - -def prefix_url(url: str, namespaces: Dict[str, str]) -> str: - """Expand short forms into full URLs using the given namespace dictionary.""" - for k, v in namespaces.items(): - if url.startswith(v): - 
return k + ":" + url[len(v) :] - return url - - -def save_relative_uri( - uri: Any, - base_url: str, - scoped_id: bool, - ref_scope: Optional[int], - relative_uris: bool, -) -> Any: - """Convert any URI to a relative one, obeying the scoping rules.""" - if isinstance(uri, MutableSequence): - return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] - elif isinstance(uri, str): - if not relative_uris or uri == base_url: - return uri - urisplit = urlsplit(uri) - basesplit = urlsplit(base_url) - if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: - if urisplit.path != basesplit.path: - p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) - if urisplit.fragment: - p = p + "#" + urisplit.fragment - return p - - basefrag = basesplit.fragment + "/" - if ref_scope: - sp = basefrag.split("/") - i = 0 - while i < ref_scope: - sp.pop() - i += 1 - basefrag = "/".join(sp) - - if urisplit.fragment.startswith(basefrag): - return urisplit.fragment[len(basefrag) :] - return urisplit.fragment - return uri - else: - return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) - - -def shortname(inputid: str) -> str: - """ - Compute the shortname of a fully qualified identifier. - - See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. - """ - parsed_id = urlparse(inputid) - if parsed_id.fragment: - return parsed_id.fragment.split("/")[-1] - return parsed_id.path.split("/")[-1] - - -def parser_info() -> str: - return "org.w3id.cwl.v1_0" - - -class RecordField(Saveable): - """ - A field of a record. 
- """ - - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.doc = doc - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordField): - return bool( - self.name == other.name - and self.doc == other.doc - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash((self.name, self.doc, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "RecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - 
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'RecordField'", None, _errors__) - _constructed = cls( - name=name, - doc=doc, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if 
isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", 
- val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "doc", "type"]) - - -class RecordSchema(Saveable): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordSchema): - return bool(self.fields == other.fields and self.type == other.type) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "RecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 
'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'RecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - 
# If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type"]) - - -class EnumSchema(Saveable): - """ - Define an enumerated type. 
- - """ - - def __init__( - self, - symbols: Any, - type: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnumSchema): - return bool(self.symbols == other.symbols and self.type == other.type) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "EnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Enum_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise 
ValidationException("Trying 'EnumSchema'", None, _errors__) - _constructed = cls( - symbols=symbols, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.symbols is not None and "symbols" not in r: - u = 
save_relative_uri(self.symbols, base_url, True, None, relative_uris) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type"]) - - -class ArraySchema(Saveable): - def __init__( - self, - items: Any, - type: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ArraySchema): - return bool(self.items == other.items and self.type == other.type) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - 
_doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - items = load_field( - _doc.get("items"), - typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ArraySchema'", None, _errors__) - _constructed = cls( - items=items, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = 
get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type"]) - - -class File(Saveable): - """ - Represents a file (or group of files when `secondaryFiles` is provided) that - will be accessible by tools using standard POSIX file system call API such as - open(2) and read(2). - - Files are represented as objects with `class` of `File`. File objects have - a number of properties that provide metadata about the file. - - The `location` property of a File is a URI that uniquely identifies the - file. Implementations must support the file:// URI scheme and may support - other schemes such as http://. The value of `location` may also be a - relative reference, in which case it must be resolved relative to the URI - of the document it appears in. Alternately to `location`, implementations - must also accept the `path` property on File, which must be a filesystem - path available on the same host as the CWL runner (for inputs) or the - runtime environment of a command line tool execution (for command line tool - outputs). - - If no `location` or `path` is specified, a file object must specify - `contents` with the UTF-8 text content of the file. This is a "file - literal". File literals do not correspond to external resources, but are - created on disk with `contents` with when needed for a executing a tool. - Where appropriate, expressions can return file literals to define new files - on a runtime. The maximum size of `contents` is 64 kilobytes. - - The `basename` property defines the filename on disk where the file is - staged. This may differ from the resource name. If not provided, - `basename` must be computed from the last path part of `location` and made - available to expressions. 
- - The `secondaryFiles` property is a list of File or Directory objects that - must be staged in the same directory as the primary file. It is an error - for file names to be duplicated in `secondaryFiles`. - - The `size` property is the size in bytes of the File. It must be computed - from the resource and made available to expressions. The `checksum` field - contains a cryptographic hash of the file content for use it verifying file - contents. Implementations may, at user option, enable or disable - computation of the `checksum` field for performance or other reasons. - However, the ability to compute output checksums is required to pass the - CWL conformance test suite. - - When executing a CommandLineTool, the files and secondary files may be - staged to an arbitrary directory, but must use the value of `basename` for - the filename. The `path` property must be file path in the context of the - tool execution runtime (local to the compute node, or within the executing - container). All computed properties should be available to expressions. - File literals also must be staged and `path` must be set. - - When collecting CommandLineTool outputs, `glob` matching returns file paths - (with the `path` property) and the derived properties. This can all be - modified by `outputEval`. Alternately, if the file `cwl.output.json` is - present in the output, `outputBinding` is ignored. - - File objects in the output must provide either a `location` URI or a `path` - property in the context of the tool execution runtime (local to the compute - node, or within the executing container). - - When evaluating an ExpressionTool, file objects must be referenced via - `location` (the expression tool does not have access to files on disk so - `path` is meaningless) or as file literals. It is legal to return a file - object with an existing `location` but a different `basename`. 
The - `loadContents` field of ExpressionTool inputs behaves the same as on - CommandLineTool inputs, however it is not meaningful on the outputs. - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - """ - - def __init__( - self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - dirname: Optional[Any] = None, - nameroot: Optional[Any] = None, - nameext: Optional[Any] = None, - checksum: Optional[Any] = None, - size: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - format: Optional[Any] = None, - contents: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "File" - self.location = location - self.path = path - self.basename = basename - self.dirname = dirname - self.nameroot = nameroot - self.nameext = nameext - self.checksum = checksum - self.size = size - self.secondaryFiles = secondaryFiles - self.format = format - self.contents = contents - - def __eq__(self, other: Any) -> bool: - if isinstance(other, File): - return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.dirname == other.dirname - and self.nameroot == other.nameroot - and self.nameext == other.nameext - and self.checksum == other.checksum - and self.size == other.size - and self.secondaryFiles == other.secondaryFiles - and self.format == other.format - and self.contents == other.contents - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.location, - self.path, - self.basename, - self.dirname, - self.nameroot, 
- self.nameext, - self.checksum, - self.size, - self.secondaryFiles, - self.format, - self.contents, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "File": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "File": - raise ValidationException("Not a File") - - if "location" in _doc: - try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'location' field is not valid because:", - SourceLine(_doc, "location", str), - [e], - ) - ) - else: - location = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'path' field is not valid because:", - SourceLine(_doc, "path", str), - [e], - ) - ) - else: - path = None - if "basename" in _doc: - try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'basename' field is not valid because:", - SourceLine(_doc, "basename", str), - [e], - ) - ) - else: - basename = None - if "dirname" in _doc: - try: - dirname = load_field( - _doc.get("dirname"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dirname' field is not valid because:", - SourceLine(_doc, "dirname", str), - [e], - ) - ) - else: - dirname = None - if "nameroot" in _doc: - try: - nameroot = load_field( - _doc.get("nameroot"), - union_of_None_type_or_strtype, 
- baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'nameroot' field is not valid because:", - SourceLine(_doc, "nameroot", str), - [e], - ) - ) - else: - nameroot = None - if "nameext" in _doc: - try: - nameext = load_field( - _doc.get("nameext"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'nameext' field is not valid because:", - SourceLine(_doc, "nameext", str), - [e], - ) - ) - else: - nameext = None - if "checksum" in _doc: - try: - checksum = load_field( - _doc.get("checksum"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'checksum' field is not valid because:", - SourceLine(_doc, "checksum", str), - [e], - ) - ) - else: - checksum = None - if "size" in _doc: - try: - size = load_field( - _doc.get("size"), - union_of_None_type_or_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'size' field is not valid because:", - SourceLine(_doc, "size", str), - [e], - ) - ) - else: - size = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, 
"format", str), - [e], - ) - ) - else: - format = None - if "contents" in _doc: - try: - contents = load_field( - _doc.get("contents"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'contents' field is not valid because:", - SourceLine(_doc, "contents", str), - [e], - ) - ) - else: - contents = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'File'", None, _errors__) - _constructed = cls( - location=location, - path=path, - basename=basename, - dirname=dirname, - nameroot=nameroot, - nameext=nameext, - checksum=checksum, - size=size, - secondaryFiles=secondaryFiles, - format=format, - contents=contents, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - 
r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "File" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.location is not None and "location" not in r: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="location", - val=r.get("location"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.path is not None and "path" not in r: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="path", - val=r.get("path"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.basename is not None and "basename" not in r: - r["basename"] = save( - self.basename, - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="basename", - val=r.get("basename"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dirname is not None and "dirname" not in r: - r["dirname"] = save( - self.dirname, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dirname", - val=r.get("dirname"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.nameroot is not None and "nameroot" not in r: - r["nameroot"] = save( - self.nameroot, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="nameroot", - val=r.get("nameroot"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.nameext is not None and "nameext" not in r: - r["nameext"] = save( - self.nameext, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="nameext", - val=r.get("nameext"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.checksum is not None and "checksum" not in r: - r["checksum"] = save( - self.checksum, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info 
= add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="checksum", - val=r.get("checksum"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.size is not None and "size" not in r: - r["size"] = save( - self.size, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="size", - val=r.get("size"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, base_url, True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.contents is not None and "contents" not in r: - r["contents"] = save( - self.contents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="contents", - val=r.get("contents"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "location", - "path", - "basename", - "dirname", - "nameroot", - "nameext", - "checksum", - "size", - "secondaryFiles", - "format", - "contents", - ] - ) - - -class Directory(Saveable): - """ - Represents a directory to present to a command line tool. - - Directories are represented as objects with `class` of `Directory`. Directory objects have - a number of properties that provide metadata about the directory. - - The `location` property of a Directory is a URI that uniquely identifies - the directory. Implementations must support the file:// URI scheme and may - support other schemes such as http://. Alternately to `location`, - implementations must also accept the `path` property on Directory, which - must be a filesystem path available on the same host as the CWL runner (for - inputs) or the runtime environment of a command line tool execution (for - command line tool outputs). - - A Directory object may have a `listing` field. This is a list of File and - Directory objects that are contained in the Directory. For each entry in - `listing`, the `basename` property defines the name of the File or - Subdirectory when staged to disk. If `listing` is not provided, the - implementation must have some way of fetching the Directory listing at - runtime based on the `location` field. - - If a Directory does not have `location`, it is a Directory literal. A - Directory literal must provide `listing`. Directory literals must be - created on disk at runtime as needed. - - The resources in a Directory literal do not need to have any implied - relationship in their `location`. 
For example, a Directory listing may - contain two files located on different hosts. It is the responsibility of - the runtime to ensure that those files are staged to disk appropriately. - Secondary files associated with files in `listing` must also be staged to - the same Directory. - - When executing a CommandLineTool, Directories must be recursively staged - first and have local values of `path` assigend. - - Directory objects in CommandLineTool output must provide either a - `location` URI or a `path` property in the context of the tool execution - runtime (local to the compute node, or within the executing container). - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - Name conflicts (the same `basename` appearing multiple times in `listing` - or in any entry in `secondaryFiles` in the listing) is a fatal error. - - """ - - def __init__( - self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - listing: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "Directory" - self.location = location - self.path = path - self.basename = basename - self.listing = listing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Directory): - return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.listing == other.listing - ) - return False - - def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, 
- loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Directory": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "Directory": - raise ValidationException("Not a Directory") - - if "location" in _doc: - try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'location' field is not valid because:", - SourceLine(_doc, "location", str), - [e], - ) - ) - else: - location = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'path' field is not valid because:", - SourceLine(_doc, "path", str), - [e], - ) - ) - else: - path = None - if "basename" in _doc: - try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'basename' field is not valid because:", - SourceLine(_doc, "basename", str), - [e], - ) - ) - else: - basename = None - if "listing" in _doc: - try: - listing = load_field( - _doc.get("listing"), - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'listing' field is not valid because:", - SourceLine(_doc, "listing", str), - [e], - ) - ) - else: - listing = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - 
_errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Directory'", None, _errors__) - _constructed = cls( - location=location, - path=path, - basename=basename, - listing=listing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "Directory" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, 
inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.location is not None and "location" not in r: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="location", - val=r.get("location"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.path is not None and "path" not in r: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="path", - val=r.get("path"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.basename is not None and "basename" not in r: - r["basename"] = save( - self.basename, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="basename", - val=r.get("basename"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.listing is not None and "listing" not in r: - r["listing"] = save( - self.listing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="listing", - val=r.get("listing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: 
- if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "location", "path", "basename", "listing"]) - - -class SchemaBase(Saveable): - pass - - -class Parameter(SchemaBase): - """ - Define an input or output parameter to a process. - - """ - - pass - - -class InputBinding(Saveable): - pass - - -class OutputBinding(Saveable): - pass - - -class InputSchema(SchemaBase): - pass - - -class OutputSchema(SchemaBase): - pass - - -class InputRecordField(RecordField): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - inputBinding: Optional[Any] = None, - label: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.doc = doc - self.type = type - self.inputBinding = inputBinding - self.label = label - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordField): - return bool( - self.name == other.name - and self.doc == other.doc - and self.type == other.type - and self.inputBinding == other.inputBinding - and self.label == other.label - ) - return False - - def __hash__(self) -> int: - return hash((self.name, self.doc, self.type, self.inputBinding, self.label)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - 
uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - 
label = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`, `inputBinding`, `label`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputRecordField'", None, _errors__) - _constructed = cls( - name=name, - doc=doc, - type=type, - inputBinding=inputBinding, - label=label, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys 
+ [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "doc", "type", "inputBinding", "label"]) - - -class InputRecordSchema(RecordSchema, InputSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, 
self.type, self.label, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, 
"", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputRecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - label=label, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == 
list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "name"]) - - -class InputEnumSchema(EnumSchema, InputSchema): - def __init__( - self, - symbols: Any, - type: Any, - label: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - self.label = label - self.name = name - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputEnumSchema): - return bool( - self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.name == other.name - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type, self.label, self.name, self.inputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field 
is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Enum_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `name`, `inputBinding`".format( - k - ), - 
SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputEnumSchema'", None, _errors__) - _constructed = cls( - symbols=symbols, - type=type, - label=label, - name=name, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - 
val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type", "label", "name", "inputBinding"]) - - -class InputArraySchema(ArraySchema, InputSchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.inputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - items = load_field( - _doc.get("items"), - 
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputArraySchema'", None, _errors__) - _constructed = cls( - 
items=items, - type=type, - label=label, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - 
base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = 
frozenset(["items", "type", "label", "inputBinding"]) - - -class OutputRecordField(RecordField): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.doc = doc - self.type = type - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordField): - return bool( - self.name == other.name - and self.doc == other.doc - and self.type == other.type - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.name, self.doc, self.type, self.outputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - 
_errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputRecordField'", None, _errors__) - _constructed = cls( - name=name, - doc=doc, - type=type, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - 
shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, 
- base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "doc", "type", "outputBinding"]) - - -class OutputRecordSchema(RecordSchema, OutputSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = 
CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type, self.label)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - 
_errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputRecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - label=label, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - 
line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label"]) - - -class OutputEnumSchema(EnumSchema, OutputSchema): - def __init__( - self, - symbols: Any, - type: Any, - label: Optional[Any] = None, - outputBinding: Optional[Any] = None, - 
extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - self.label = label - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputEnumSchema): - return bool( - self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type, self.label, self.outputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Enum_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - 
else: - label = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputEnumSchema'", None, _errors__) - _constructed = cls( - symbols=symbols, - type=type, - label=label, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if 
isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - 
line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type", "label", "outputBinding"]) - - -class OutputArraySchema(ArraySchema, OutputSchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.outputBinding)) - - @classmethod - def fromDoc( - cls, - 
doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - items = load_field( - _doc.get("items"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - 
else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputArraySchema'", None, _errors__) - _constructed = cls( - items=items, - type=type, - label=label, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( 
- old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "outputBinding"]) - - -class InputParameter(Parameter): - def __init__( - self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - inputBinding: Optional[Any] = None, - default: Optional[Any] = None, - type: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.inputBinding = inputBinding - self.default = default - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.inputBinding == other.inputBinding - and self.default == other.default - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.inputBinding, - self.default, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: 
LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - raise ValidationException("Missing id") - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - 
loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - if "type" in _doc: - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - else: - type = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): 
- if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputParameter'", None, _errors__) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - inputBinding=inputBinding, - default=default, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - 
for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = 
add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "inputBinding", - "default", - "type", - ] - ) - - -class OutputParameter(Parameter): - def __init__( - self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = 
LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.outputBinding = outputBinding - self.format = format - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.outputBinding == other.outputBinding - and self.format == other.format - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.outputBinding, - self.format, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - raise ValidationException("Missing id") - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - 
union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, 
scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputParameter'", None, _errors__) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - outputBinding=outputBinding, - format=format, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 
'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - 
cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - 
if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "outputBinding", - "format", - ] - ) - - -class ProcessRequirement(Saveable): - """ - A process requirement declares a prerequisite that may or must be fulfilled - before executing a process. See [`Process.hints`](#process) and - [`Process.requirements`](#process). - - Process requirements are the primary mechanism for specifying extensions to - the CWL core specification. - - """ - - pass - - -class Process(Saveable): - """ - - The base executable type in CWL is the `Process` object defined by the - document. Note that the `Process` object is abstract and cannot be - directly executed. - - """ - - pass - - -class InlineJavascriptRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support inline Javascript expressions. - If this requirement is not present, the workflow platform must not perform expression - interpolatation. 
- - """ - - def __init__( - self, - expressionLib: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InlineJavascriptRequirement" - self.expressionLib = expressionLib - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InlineJavascriptRequirement): - return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.expressionLib)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InlineJavascriptRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "InlineJavascriptRequirement": - raise ValidationException("Not a InlineJavascriptRequirement") - - if "expressionLib" in _doc: - try: - expressionLib = load_field( - _doc.get("expressionLib"), - union_of_None_type_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'expressionLib' field is not valid because:", - SourceLine(_doc, "expressionLib", str), - [e], - ) - ) - else: - expressionLib = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `expressionLib`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - 
break - - if _errors__: - raise ValidationException( - "Trying 'InlineJavascriptRequirement'", None, _errors__ - ) - _constructed = cls( - expressionLib=expressionLib, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "InlineJavascriptRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift - ) - if self.expressionLib is not None and "expressionLib" not in r: - r["expressionLib"] = save( - self.expressionLib, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="expressionLib", - val=r.get("expressionLib"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "expressionLib"]) - - -class SchemaDefRequirement(ProcessRequirement): - """ - This field consists of an array of type definitions which must be used when - interpreting the `inputs` and `outputs` fields. When a `type` field - contain a IRI, the implementation must check if the type is defined in - `schemaDefs` and use that definition. If the type is not found in - `schemaDefs`, it is an error. The entries in `schemaDefs` must be - processed in the order listed such that later schema definitions may refer - to earlier schema definitions. 
- - """ - - def __init__( - self, - types: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SchemaDefRequirement" - self.types = types - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SchemaDefRequirement): - return bool(self.class_ == other.class_ and self.types == other.types) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.types)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SchemaDefRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SchemaDefRequirement": - raise ValidationException("Not a SchemaDefRequirement") - - try: - types = load_field( - _doc.get("types"), - array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'types' field is not valid because:", - SourceLine(_doc, "types", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SchemaDefRequirement'", None, _errors__) - _constructed = cls( - types=types, - 
extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SchemaDefRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.types is not None and "types" not in r: - r["types"] = save( - self.types, - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="types", - val=r.get("types"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "types"]) - - -class EnvironmentDef(Saveable): - """ - Define an environment variable that will be set in the runtime environment - by the workflow platform when executing the command line tool. May be the - result of executing an expression, such as getting a parameter from input. - - """ - - def __init__( - self, - envName: Any, - envValue: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.envName = envName - self.envValue = envValue - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvironmentDef): - return bool( - self.envName == other.envName and self.envValue == other.envValue - ) - return False - - def __hash__(self) -> int: - return hash((self.envName, self.envValue)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "EnvironmentDef": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - envName = load_field( - _doc.get("envName"), - strtype, - baseuri, - loadingOptions, - ) - except 
ValidationException as e: - _errors__.append( - ValidationException( - "the 'envName' field is not valid because:", - SourceLine(_doc, "envName", str), - [e], - ) - ) - try: - envValue = load_field( - _doc.get("envValue"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'envValue' field is not valid because:", - SourceLine(_doc, "envValue", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'EnvironmentDef'", None, _errors__) - _constructed = cls( - envName=envName, - envValue=envValue, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if 
isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.envName is not None and "envName" not in r: - r["envName"] = save( - self.envName, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="envName", - val=r.get("envName"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.envValue is not None and "envValue" not in r: - r["envValue"] = save( - self.envValue, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="envValue", - val=r.get("envValue"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = 
self.loadingOptions.schemas - return r - - attrs = frozenset(["envName", "envValue"]) - - -class CommandLineBinding(InputBinding): - """ - - When listed under `inputBinding` in the input schema, the term - "value" refers to the the corresponding value in the input object. For - binding objects listed in `CommandLineTool.arguments`, the term "value" - refers to the effective value after evaluating `valueFrom`. - - The binding behavior when building the command line depends on the data - type of the value. If there is a mismatch between the type described by - the input schema and the effective value, such as resulting from an - expression evaluation, an implementation must use the data type of the - effective value. - - - **string**: Add `prefix` and the string to the command line. - - - **number**: Add `prefix` and decimal representation to command line. - - - **boolean**: If true, add `prefix` to the command line. If false, add - nothing. - - - **File**: Add `prefix` and the value of - [`File.path`](#File) to the command line. - - - **Directory**: Add `prefix` and the value of - [`Directory.path`](#Directory) to the command line. - - - **array**: If `itemSeparator` is specified, add `prefix` and the join - the array into a single string with `itemSeparator` separating the - items. Otherwise first add `prefix`, then recursively process - individual elements. - If the array is empty, it does not add anything to command line. - - - **object**: Add `prefix` only, and recursively add object fields for - which `inputBinding` is specified. - - - **null**: Add nothing. 
- - """ - - def __init__( - self, - loadContents: Optional[Any] = None, - position: Optional[Any] = None, - prefix: Optional[Any] = None, - separate: Optional[Any] = None, - itemSeparator: Optional[Any] = None, - valueFrom: Optional[Any] = None, - shellQuote: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - self.position = position - self.prefix = prefix - self.separate = separate - self.itemSeparator = itemSeparator - self.valueFrom = valueFrom - self.shellQuote = shellQuote - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineBinding): - return bool( - self.loadContents == other.loadContents - and self.position == other.position - and self.prefix == other.prefix - and self.separate == other.separate - and self.itemSeparator == other.itemSeparator - and self.valueFrom == other.valueFrom - and self.shellQuote == other.shellQuote - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.loadContents, - self.position, - self.prefix, - self.separate, - self.itemSeparator, - self.valueFrom, - self.shellQuote, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandLineBinding": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid 
because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "position" in _doc: - try: - position = load_field( - _doc.get("position"), - union_of_None_type_or_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'position' field is not valid because:", - SourceLine(_doc, "position", str), - [e], - ) - ) - else: - position = None - if "prefix" in _doc: - try: - prefix = load_field( - _doc.get("prefix"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'prefix' field is not valid because:", - SourceLine(_doc, "prefix", str), - [e], - ) - ) - else: - prefix = None - if "separate" in _doc: - try: - separate = load_field( - _doc.get("separate"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'separate' field is not valid because:", - SourceLine(_doc, "separate", str), - [e], - ) - ) - else: - separate = None - if "itemSeparator" in _doc: - try: - itemSeparator = load_field( - _doc.get("itemSeparator"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'itemSeparator' field is not valid because:", - SourceLine(_doc, "itemSeparator", str), - [e], - ) - ) - else: - itemSeparator = None - if "valueFrom" in _doc: - try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'valueFrom' field is not valid because:", - SourceLine(_doc, "valueFrom", str), - [e], - ) - ) - else: - valueFrom = None - if "shellQuote" in _doc: - try: - shellQuote = load_field( - _doc.get("shellQuote"), - 
union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'shellQuote' field is not valid because:", - SourceLine(_doc, "shellQuote", str), - [e], - ) - ) - else: - shellQuote = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandLineBinding'", None, _errors__) - _constructed = cls( - loadContents=loadContents, - position=position, - prefix=prefix, - separate=separate, - itemSeparator=itemSeparator, - valueFrom=valueFrom, - shellQuote=shellQuote, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if 
isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.position is not None and "position" not in r: - r["position"] = save( - self.position, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="position", - val=r.get("position"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.prefix is not None and "prefix" not in r: - r["prefix"] = save( - self.prefix, - top=False, - base_url=base_url, - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="prefix", - val=r.get("prefix"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.separate is not None and "separate" not in r: - r["separate"] = save( - self.separate, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="separate", - val=r.get("separate"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.itemSeparator is not None and "itemSeparator" not in r: - r["itemSeparator"] = save( - self.itemSeparator, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="itemSeparator", - val=r.get("itemSeparator"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.valueFrom is not None and "valueFrom" not in r: - r["valueFrom"] = save( - self.valueFrom, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="valueFrom", - val=r.get("valueFrom"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.shellQuote is not None and "shellQuote" not in r: - r["shellQuote"] = save( - self.shellQuote, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - 
max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="shellQuote", - val=r.get("shellQuote"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "loadContents", - "position", - "prefix", - "separate", - "itemSeparator", - "valueFrom", - "shellQuote", - ] - ) - - -class CommandOutputBinding(OutputBinding): - """ - Describes how to generate an output parameter based on the files produced - by a CommandLineTool. - - The output parameter value is generated by applying these operations in the - following order: - - - glob - - loadContents - - outputEval - - secondaryFiles - - """ - - def __init__( - self, - glob: Optional[Any] = None, - loadContents: Optional[Any] = None, - outputEval: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.glob = glob - self.loadContents = loadContents - self.outputEval = outputEval - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputBinding): - return bool( - self.glob == other.glob - and self.loadContents == other.loadContents - and self.outputEval == other.outputEval - ) - return False - - def __hash__(self) -> int: - return hash((self.glob, self.loadContents, self.outputEval)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> 
"CommandOutputBinding": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "glob" in _doc: - try: - glob = load_field( - _doc.get("glob"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'glob' field is not valid because:", - SourceLine(_doc, "glob", str), - [e], - ) - ) - else: - glob = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "outputEval" in _doc: - try: - outputEval = load_field( - _doc.get("outputEval"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputEval' field is not valid because:", - SourceLine(_doc, "outputEval", str), - [e], - ) - ) - else: - outputEval = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `glob`, `loadContents`, `outputEval`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandOutputBinding'", None, _errors__) - _constructed = cls( - glob=glob, - loadContents=loadContents, - outputEval=outputEval, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - 
top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.glob is not None and "glob" not in r: - r["glob"] = save( - self.glob, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - 
key="glob", - val=r.get("glob"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputEval is not None and "outputEval" not in r: - r["outputEval"] = save( - self.outputEval, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputEval", - val=r.get("outputEval"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["glob", "loadContents", "outputEval"]) - - -class CommandInputRecordField(InputRecordField): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - inputBinding: Optional[Any] = None, - label: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.doc = doc 
- self.type = type - self.inputBinding = inputBinding - self.label = label - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordField): - return bool( - self.name == other.name - and self.doc == other.doc - and self.type == other.type - and self.inputBinding == other.inputBinding - and self.label == other.label - ) - return False - - def __hash__(self) -> int: - return hash((self.name, self.doc, self.type, self.inputBinding, self.label)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - 
except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`, `inputBinding`, `label`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordField'", None, _errors__ - ) - _constructed = cls( - name=name, - doc=doc, - type=type, - inputBinding=inputBinding, - label=label, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = 
iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - 
max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "doc", "type", "inputBinding", "label"]) - - -class 
CommandInputRecordSchema(InputRecordSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except 
ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordSchema'", None, _errors__ - ) - _constructed = cls( - fields=fields, - type=type, - label=label, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - 
r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - 
key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "name"]) - - -class CommandInputEnumSchema(InputEnumSchema): - def __init__( - self, - symbols: Any, - type: Any, - label: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - self.label = label - self.name = name - 
self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputEnumSchema): - return bool( - self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.name == other.name - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type, self.label, self.name, self.inputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Enum_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - 
loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `name`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputEnumSchema'", None, _errors__ - ) - _constructed = cls( - symbols=symbols, - type=type, - label=label, - name=name, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, 
int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - 
top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type", "label", "name", "inputBinding"]) - - -class CommandInputArraySchema(InputArraySchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = 
extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.inputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - items = load_field( - _doc.get("items"), - typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field 
is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputArraySchema'", None, _errors__ - ) - _constructed = cls( - items=items, - type=type, - label=label, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = 
self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=base_url, - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "inputBinding"]) - - -class CommandOutputRecordField(OutputRecordField): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.doc = doc - self.type = type - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordField): - return bool( - self.name == other.name - and self.doc == other.doc - and self.type == other.type - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - 
return hash((self.name, self.doc, self.type, self.outputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - 
_errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `doc`, `type`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordField'", None, _errors__ - ) - _constructed = cls( - name=name, - doc=doc, - type=type, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in 
inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and 
"outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "doc", "type", "outputBinding"]) - - -class CommandOutputRecordSchema(OutputRecordSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = 
doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if 
_errors__: - raise ValidationException( - "Trying 'CommandOutputRecordSchema'", None, _errors__ - ) - _constructed = cls( - fields=fields, - type=type, - label=label, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - 
r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "name"]) - - -class CommandOutputEnumSchema(OutputEnumSchema): - def __init__( - self, - symbols: Any, - type: Any, - label: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - self.label = label - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputEnumSchema): - return bool( - self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type, self.label, self.outputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Enum_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if 
"label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputEnumSchema'", None, _errors__ - ) - _constructed = cls( - symbols=symbols, - type=type, - label=label, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = 
get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type", "label", "outputBinding"]) - - -class CommandOutputArraySchema(OutputArraySchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputArraySchema): - return bool( 
- self.items == other.items - and self.type == other.type - and self.label == other.label - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.outputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - items = load_field( - _doc.get("items"), - typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_symbolLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 
'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputArraySchema'", None, _errors__ - ) - _constructed = cls( - items=items, - type=type, - label=label, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, 
- base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - 
r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "outputBinding"]) - - -class CommandInputParameter(InputParameter): - """ - An input parameter for a CommandLineTool. - """ - - def __init__( - self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - inputBinding: Optional[Any] = None, - default: Optional[Any] = None, - type: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.inputBinding = inputBinding - self.default = default - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and 
self.format == other.format - and self.inputBinding == other.inputBinding - and self.default == other.default - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.inputBinding, - self.default, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - raise ValidationException("Missing id") - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - 
streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - if "type" in _doc: - try: - type = load_field( - _doc.get("type"), - 
typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - else: - type = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandInputParameter'", None, _errors__) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - inputBinding=inputBinding, - default=default, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) 
- temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, 
- line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "inputBinding", - "default", - "type", - ] - ) - - -class 
CommandOutputParameter(OutputParameter): - """ - An output parameter for a CommandLineTool. - """ - - def __init__( - self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - format: Optional[Any] = None, - type: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.outputBinding = outputBinding - self.format = format - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.outputBinding == other.outputBinding - and self.format == other.format - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.outputBinding, - self.format, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - raise ValidationException("Missing id") - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - 
except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "type" in _doc: - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - else: - type = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - outputBinding=outputBinding, - format=format, - type=type, - 
extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = 
add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces 
- if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "outputBinding", - "format", - "type", - ] - ) - - -class CommandLineTool(Process): - """ - This defines the schema of the CWL Command Line Tool Description document. - - """ - - def __init__( - self, - inputs: Any, - outputs: Any, - id: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - baseCommand: Optional[Any] = None, - arguments: Optional[Any] = None, - stdin: Optional[Any] = None, - stderr: Optional[Any] = None, - stdout: Optional[Any] = None, - successCodes: Optional[Any] = None, - temporaryFailCodes: Optional[Any] = None, - permanentFailCodes: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.label = label - self.doc = doc - self.cwlVersion = cwlVersion - self.class_ = "CommandLineTool" - self.baseCommand = baseCommand - self.arguments = arguments - self.stdin = stdin - self.stderr = stderr - self.stdout = stdout - self.successCodes = successCodes - self.temporaryFailCodes = temporaryFailCodes - self.permanentFailCodes = permanentFailCodes - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineTool): - return bool( - self.id == other.id - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and 
self.label == other.label - and self.doc == other.doc - and self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.baseCommand == other.baseCommand - and self.arguments == other.arguments - and self.stdin == other.stdin - and self.stderr == other.stderr - and self.stdout == other.stdout - and self.successCodes == other.successCodes - and self.temporaryFailCodes == other.temporaryFailCodes - and self.permanentFailCodes == other.permanentFailCodes - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.label, - self.doc, - self.cwlVersion, - self.class_, - self.baseCommand, - self.arguments, - self.stdin, - self.stderr, - self.stdout, - self.successCodes, - self.temporaryFailCodes, - self.permanentFailCodes, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandLineTool": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "CommandLineTool": - raise ValidationException("Not a CommandLineTool") - - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - try: - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_CommandInputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputs' 
field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - ) - ) - try: - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_CommandOutputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputs' field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "label" in _doc: - try: - 
label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'cwlVersion' field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - ) - ) - else: - cwlVersion = None - if "baseCommand" in _doc: - try: - baseCommand = load_field( - _doc.get("baseCommand"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'baseCommand' field is not valid because:", - SourceLine(_doc, "baseCommand", str), - [e], - ) - ) - else: - baseCommand = None - if "arguments" in _doc: - try: - arguments = load_field( - _doc.get("arguments"), - union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'arguments' field is not valid because:", - SourceLine(_doc, "arguments", str), - [e], - ) - ) - else: - arguments = None - if "stdin" in _doc: - try: - stdin = load_field( - _doc.get("stdin"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except 
ValidationException as e: - _errors__.append( - ValidationException( - "the 'stdin' field is not valid because:", - SourceLine(_doc, "stdin", str), - [e], - ) - ) - else: - stdin = None - if "stderr" in _doc: - try: - stderr = load_field( - _doc.get("stderr"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'stderr' field is not valid because:", - SourceLine(_doc, "stderr", str), - [e], - ) - ) - else: - stderr = None - if "stdout" in _doc: - try: - stdout = load_field( - _doc.get("stdout"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'stdout' field is not valid because:", - SourceLine(_doc, "stdout", str), - [e], - ) - ) - else: - stdout = None - if "successCodes" in _doc: - try: - successCodes = load_field( - _doc.get("successCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'successCodes' field is not valid because:", - SourceLine(_doc, "successCodes", str), - [e], - ) - ) - else: - successCodes = None - if "temporaryFailCodes" in _doc: - try: - temporaryFailCodes = load_field( - _doc.get("temporaryFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'temporaryFailCodes' field is not valid because:", - SourceLine(_doc, "temporaryFailCodes", str), - [e], - ) - ) - else: - temporaryFailCodes = None - if "permanentFailCodes" in _doc: - try: - permanentFailCodes = load_field( - _doc.get("permanentFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'permanentFailCodes' 
field is not valid because:", - SourceLine(_doc, "permanentFailCodes", str), - [e], - ) - ) - else: - permanentFailCodes = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandLineTool'", None, _errors__) - _constructed = cls( - id=id, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - label=label, - doc=doc, - cwlVersion=cwlVersion, - baseCommand=baseCommand, - arguments=arguments, - stdin=stdin, - stderr=stderr, - stdout=stdout, - successCodes=successCodes, - temporaryFailCodes=temporaryFailCodes, - permanentFailCodes=permanentFailCodes, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - 
r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "CommandLineTool" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - 
max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - 
key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.baseCommand is not None and "baseCommand" not in r: - r["baseCommand"] = save( - self.baseCommand, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="baseCommand", - val=r.get("baseCommand"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.arguments is not None and "arguments" not in r: - r["arguments"] = save( - self.arguments, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="arguments", - val=r.get("arguments"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - 
if self.stdin is not None and "stdin" not in r: - r["stdin"] = save( - self.stdin, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stdin", - val=r.get("stdin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.stderr is not None and "stderr" not in r: - r["stderr"] = save( - self.stderr, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stderr", - val=r.get("stderr"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.stdout is not None and "stdout" not in r: - r["stdout"] = save( - self.stdout, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stdout", - val=r.get("stdout"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.successCodes is not None and "successCodes" not in r: - r["successCodes"] = save( - self.successCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="successCodes", - val=r.get("successCodes"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: - r["temporaryFailCodes"] = save( - 
self.temporaryFailCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="temporaryFailCodes", - val=r.get("temporaryFailCodes"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.permanentFailCodes is not None and "permanentFailCodes" not in r: - r["permanentFailCodes"] = save( - self.permanentFailCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="permanentFailCodes", - val=r.get("permanentFailCodes"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "inputs", - "outputs", - "requirements", - "hints", - "label", - "doc", - "cwlVersion", - "class", - "baseCommand", - "arguments", - "stdin", - "stderr", - "stdout", - "successCodes", - "temporaryFailCodes", - "permanentFailCodes", - ] - ) - - -class DockerRequirement(ProcessRequirement): - """ - Indicates that a workflow component should be run in a - [Docker](http://docker.com) container, and specifies how to fetch or build - the image. - - If a CommandLineTool lists `DockerRequirement` under - `hints` (or `requirements`), it may (or must) be run in the specified Docker - container. - - The platform must first acquire or install the correct Docker image as - specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. 
- - The platform must execute the tool in the container using `docker run` with - the appropriate Docker image and tool command line. - - The workflow platform may provide input files and the designated output - directory through the use of volume bind mounts. The platform should rewrite - file paths in the input object to correspond to the Docker bind mounted - locations. That is, the platform should rewrite values in the parameter context - such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths - within the container. - - When running a tool contained in Docker, the workflow platform must not - assume anything about the contents of the Docker container, such as the - presence or absence of specific software, except to assume that the - generated command line represents a valid command within the runtime - environment of the container. - - ## Interaction with other requirements - - If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a - DockerRequirement, the environment variables must be provided to Docker - using `--env` or `--env-file` and interact with the container's preexisting - environment as defined by Docker. 
- - """ - - def __init__( - self, - dockerPull: Optional[Any] = None, - dockerLoad: Optional[Any] = None, - dockerFile: Optional[Any] = None, - dockerImport: Optional[Any] = None, - dockerImageId: Optional[Any] = None, - dockerOutputDirectory: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "DockerRequirement" - self.dockerPull = dockerPull - self.dockerLoad = dockerLoad - self.dockerFile = dockerFile - self.dockerImport = dockerImport - self.dockerImageId = dockerImageId - self.dockerOutputDirectory = dockerOutputDirectory - - def __eq__(self, other: Any) -> bool: - if isinstance(other, DockerRequirement): - return bool( - self.class_ == other.class_ - and self.dockerPull == other.dockerPull - and self.dockerLoad == other.dockerLoad - and self.dockerFile == other.dockerFile - and self.dockerImport == other.dockerImport - and self.dockerImageId == other.dockerImageId - and self.dockerOutputDirectory == other.dockerOutputDirectory - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.dockerPull, - self.dockerLoad, - self.dockerFile, - self.dockerImport, - self.dockerImageId, - self.dockerOutputDirectory, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "DockerRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "DockerRequirement": - raise ValidationException("Not a DockerRequirement") - - if "dockerPull" in _doc: - try: - dockerPull = load_field( - _doc.get("dockerPull"), - 
union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerPull' field is not valid because:", - SourceLine(_doc, "dockerPull", str), - [e], - ) - ) - else: - dockerPull = None - if "dockerLoad" in _doc: - try: - dockerLoad = load_field( - _doc.get("dockerLoad"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerLoad' field is not valid because:", - SourceLine(_doc, "dockerLoad", str), - [e], - ) - ) - else: - dockerLoad = None - if "dockerFile" in _doc: - try: - dockerFile = load_field( - _doc.get("dockerFile"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerFile' field is not valid because:", - SourceLine(_doc, "dockerFile", str), - [e], - ) - ) - else: - dockerFile = None - if "dockerImport" in _doc: - try: - dockerImport = load_field( - _doc.get("dockerImport"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerImport' field is not valid because:", - SourceLine(_doc, "dockerImport", str), - [e], - ) - ) - else: - dockerImport = None - if "dockerImageId" in _doc: - try: - dockerImageId = load_field( - _doc.get("dockerImageId"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerImageId' field is not valid because:", - SourceLine(_doc, "dockerImageId", str), - [e], - ) - ) - else: - dockerImageId = None - if "dockerOutputDirectory" in _doc: - try: - dockerOutputDirectory = load_field( - _doc.get("dockerOutputDirectory"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'dockerOutputDirectory' field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), - [e], - ) - ) - else: - dockerOutputDirectory = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'DockerRequirement'", None, _errors__) - _constructed = cls( - dockerPull=dockerPull, - dockerLoad=dockerLoad, - dockerFile=dockerFile, - dockerImport=dockerImport, - dockerImageId=dockerImageId, - dockerOutputDirectory=dockerOutputDirectory, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "DockerRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if 
hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.dockerPull is not None and "dockerPull" not in r: - r["dockerPull"] = save( - self.dockerPull, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerPull", - val=r.get("dockerPull"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerLoad is not None and "dockerLoad" not in r: - r["dockerLoad"] = save( - self.dockerLoad, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerLoad", - val=r.get("dockerLoad"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerFile is not None and "dockerFile" not in r: - r["dockerFile"] = save( - self.dockerFile, - top=False, - base_url=base_url, - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerFile", - val=r.get("dockerFile"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerImport is not None and "dockerImport" not in r: - r["dockerImport"] = save( - self.dockerImport, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerImport", - val=r.get("dockerImport"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerImageId is not None and "dockerImageId" not in r: - r["dockerImageId"] = save( - self.dockerImageId, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerImageId", - val=r.get("dockerImageId"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: - r["dockerOutputDirectory"] = save( - self.dockerOutputDirectory, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerOutputDirectory", - val=r.get("dockerOutputDirectory"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = 
self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "dockerPull", - "dockerLoad", - "dockerFile", - "dockerImport", - "dockerImageId", - "dockerOutputDirectory", - ] - ) - - -class SoftwareRequirement(ProcessRequirement): - """ - A list of software packages that should be configured in the environment of - the defined process. - - """ - - def __init__( - self, - packages: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SoftwareRequirement" - self.packages = packages - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwareRequirement): - return bool(self.class_ == other.class_ and self.packages == other.packages) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.packages)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SoftwareRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SoftwareRequirement": - raise ValidationException("Not a SoftwareRequirement") - - try: - packages = load_field( - _doc.get("packages"), - idmap_packages_array_of_SoftwarePackageLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'packages' field is not valid because:", - SourceLine(_doc, "packages", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( 
- k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SoftwareRequirement'", None, _errors__) - _constructed = cls( - packages=packages, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SoftwareRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] 
- - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.packages is not None and "packages" not in r: - r["packages"] = save( - self.packages, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="packages", - val=r.get("packages"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "packages"]) - - -class SoftwarePackage(Saveable): - def __init__( - self, - package: Any, - version: Optional[Any] = None, - specs: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.package = package - self.version = version - self.specs = specs - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwarePackage): - return bool( - self.package == other.package - and self.version == other.version - and self.specs == other.specs - ) - return False - - def __hash__(self) -> int: - return hash((self.package, self.version, self.specs)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = 
None, - ) -> "SoftwarePackage": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - package = load_field( - _doc.get("package"), - strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'package' field is not valid because:", - SourceLine(_doc, "package", str), - [e], - ) - ) - if "version" in _doc: - try: - version = load_field( - _doc.get("version"), - union_of_None_type_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'version' field is not valid because:", - SourceLine(_doc, "version", str), - [e], - ) - ) - else: - version = None - if "specs" in _doc: - try: - specs = load_field( - _doc.get("specs"), - uri_union_of_None_type_or_array_of_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'specs' field is not valid because:", - SourceLine(_doc, "specs", str), - [e], - ) - ) - else: - specs = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SoftwarePackage'", None, _errors__) - _constructed = cls( - package=package, - version=version, - specs=specs, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = 
None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.package is not None and "package" not in r: - r["package"] = save( - self.package, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="package", - val=r.get("package"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.version is not None and "version" not in r: - r["version"] = save( - self.version, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="version", - val=r.get("version"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.specs is not None and "specs" not in r: - u = save_relative_uri(self.specs, base_url, False, None, relative_uris) - r["specs"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="specs", - val=r.get("specs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["package", "version", "specs"]) - - -class Dirent(Saveable): - """ - Define a file or subdirectory that must be placed in the designated output - directory prior to executing the command line tool. May be the result of - executing an expression, such as building a configuration file from a - template. 
- - """ - - def __init__( - self, - entry: Any, - entryname: Optional[Any] = None, - writable: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.entryname = entryname - self.entry = entry - self.writable = writable - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Dirent): - return bool( - self.entryname == other.entryname - and self.entry == other.entry - and self.writable == other.writable - ) - return False - - def __hash__(self) -> int: - return hash((self.entryname, self.entry, self.writable)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Dirent": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "entryname" in _doc: - try: - entryname = load_field( - _doc.get("entryname"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'entryname' field is not valid because:", - SourceLine(_doc, "entryname", str), - [e], - ) - ) - else: - entryname = None - try: - entry = load_field( - _doc.get("entry"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'entry' field is not valid because:", - SourceLine(_doc, "entry", str), - [e], - ) - ) - if "writable" in _doc: - try: - writable = load_field( - _doc.get("writable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( 
- ValidationException( - "the 'writable' field is not valid because:", - SourceLine(_doc, "writable", str), - [e], - ) - ) - else: - writable = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Dirent'", None, _errors__) - _constructed = cls( - entryname=entryname, - entry=entry, - writable=writable, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.entryname is not None and "entryname" not in r: - r["entryname"] = save( - self.entryname, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="entryname", - val=r.get("entryname"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.entry is not None and "entry" not in r: - r["entry"] = save( - self.entry, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="entry", - val=r.get("entry"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.writable is not None and "writable" not in r: - r["writable"] = save( - self.writable, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="writable", - val=r.get("writable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: 
class InitialWorkDirRequirement(ProcessRequirement):
    """
    Define a list of files and subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool.
    """

    def __init__(
        self,
        listing: Any,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # extension_fields defaults to a CommentedMap (not a plain dict) so
        # that any ruamel.yaml line/column metadata survives a save() round trip.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.class_ = "InitialWorkDirRequirement"
        self.listing = listing

    def __eq__(self, other: Any) -> bool:
        # Equality is structural: same class marker and same listing value.
        if isinstance(other, InitialWorkDirRequirement):
            return bool(self.class_ == other.class_ and self.listing == other.listing)
        return False

    def __hash__(self) -> int:
        return hash((self.class_, self.listing))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "InitialWorkDirRequirement":
        """Construct an InitialWorkDirRequirement from a parsed document node.

        Raises ValidationException if the node's `class` is wrong or any
        field fails to load; field errors are collected and reported together.
        """
        _doc = copy.copy(doc)
        # copy.copy is shallow, so re-attach the ruamel.yaml line/column
        # bookkeeping (`lc`) explicitly when the source node carries it.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        if _doc.get("class") != "InitialWorkDirRequirement":
            raise ValidationException("Not a InitialWorkDirRequirement")

        try:
            listing = load_field(
                _doc.get("listing"),
                union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            # Record the failure (with source position) instead of raising
            # immediately, so all field errors can be reported at once below.
            _errors__.append(
                ValidationException(
                    "the 'listing' field is not valid because:",
                    SourceLine(_doc, "listing", str),
                    [e],
                )
            )
        # Keys outside cls.attrs are either namespaced extensions (contain a
        # ":") or invalid; the first invalid plain key stops the scan.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `class`, `listing`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException(
                "Trying 'InitialWorkDirRequirement'", None, _errors__
            )
        _constructed = cls(
            listing=listing,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize this object to a CommentedMap.

        Tries to reproduce the line/column layout of the originally parsed
        document.  NOTE(review): `keys` appears to be the path from the
        document root to this node, `inserted_line_info` the set of output
        lines already occupied, and `shift` the running line offset from
        insertions — inferred from usage here and in `iterate_through_doc` /
        `add_kv`, which are defined elsewhere in this module; confirm there.
        """
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        # Look up the original parsed node for this path so its line/column
        # data can seed the output map.
        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        r["class"] = "InitialWorkDirRequirement"

        # First pass: emit known fields in the order they appeared in the
        # original document, preserving their line numbers where possible.
        if doc:
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                # Skip past output lines already taken by
                                # earlier insertions, accumulating the shift.
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a single-element
                                # list, unwrap it and store the bare value.
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Second pass: emit any field set on the object but absent from the
        # original document (and therefore not handled above).
        if self.listing is not None and "listing" not in r:
            r["listing"] = save(
                self.listing,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="listing",
                val=r.get("listing"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # Only the document root (top=True) carries $namespaces/$schemas.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class", "listing"])
class EnvVarRequirement(ProcessRequirement):
    """
    Define a list of environment variables which will be set in the
    execution environment of the tool. See `EnvironmentDef` for details.

    """

    def __init__(
        self,
        envDef: Any,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # extension_fields defaults to a CommentedMap (not a plain dict) so
        # that any ruamel.yaml line/column metadata survives a save() round trip.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.class_ = "EnvVarRequirement"
        self.envDef = envDef

    def __eq__(self, other: Any) -> bool:
        # Equality is structural: same class marker and same envDef value.
        if isinstance(other, EnvVarRequirement):
            return bool(self.class_ == other.class_ and self.envDef == other.envDef)
        return False

    def __hash__(self) -> int:
        return hash((self.class_, self.envDef))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "EnvVarRequirement":
        """Construct an EnvVarRequirement from a parsed document node.

        Raises ValidationException if the node's `class` is wrong or any
        field fails to load; field errors are collected and reported together.
        """
        _doc = copy.copy(doc)
        # copy.copy is shallow, so re-attach the ruamel.yaml line/column
        # bookkeeping (`lc`) explicitly when the source node carries it.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        if _doc.get("class") != "EnvVarRequirement":
            raise ValidationException("Not a EnvVarRequirement")

        try:
            envDef = load_field(
                _doc.get("envDef"),
                idmap_envDef_array_of_EnvironmentDefLoader,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            # Record the failure (with source position) instead of raising
            # immediately, so all field errors can be reported at once below.
            _errors__.append(
                ValidationException(
                    "the 'envDef' field is not valid because:",
                    SourceLine(_doc, "envDef", str),
                    [e],
                )
            )
        # Keys outside cls.attrs are either namespaced extensions (contain a
        # ":") or invalid; the first invalid plain key stops the scan.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `class`, `envDef`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'EnvVarRequirement'", None, _errors__)
        _constructed = cls(
            envDef=envDef,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize this object to a CommentedMap.

        Tries to reproduce the line/column layout of the originally parsed
        document.  NOTE(review): `keys` appears to be the path from the
        document root to this node, `inserted_line_info` the set of output
        lines already occupied, and `shift` the running line offset from
        insertions — inferred from usage here and in `iterate_through_doc` /
        `add_kv`, which are defined elsewhere in this module; confirm there.
        """
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        # Look up the original parsed node for this path so its line/column
        # data can seed the output map.
        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        r["class"] = "EnvVarRequirement"

        # First pass: emit known fields in the order they appeared in the
        # original document, preserving their line numbers where possible.
        if doc:
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                # Skip past output lines already taken by
                                # earlier insertions, accumulating the shift.
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a single-element
                                # list, unwrap it and store the bare value.
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Second pass: emit any field set on the object but absent from the
        # original document (and therefore not handled above).
        if self.envDef is not None and "envDef" not in r:
            r["envDef"] = save(
                self.envDef,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="envDef",
                val=r.get("envDef"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # Only the document root (top=True) carries $namespaces/$schemas.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class", "envDef"])
class ShellCommandRequirement(ProcessRequirement):
    """
    Modify the behavior of CommandLineTool to generate a single string
    containing a shell command line. Each item in the argument list must be
    joined into a string separated by single spaces and quoted to prevent
    interpretation by the shell, unless `CommandLineBinding` for that argument
    contains `shellQuote: false`. If `shellQuote: false` is specified, the
    argument is joined into the command string without quoting, which allows
    the use of shell metacharacters such as `|` for pipes.

    """

    def __init__(
        self,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # This requirement carries no fields of its own beyond `class`;
        # only extension fields and loading options are stored.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.class_ = "ShellCommandRequirement"

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, ShellCommandRequirement):
            return bool(self.class_ == other.class_)
        return False

    def __hash__(self) -> int:
        # NOTE(review): (self.class_) is not a tuple (no trailing comma), so
        # this hashes the bare class-name string — harmless but inconsistent
        # with the tuple-hashing siblings.
        return hash((self.class_))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "ShellCommandRequirement":
        """Construct a ShellCommandRequirement from a parsed document node.

        Raises ValidationException if the node's `class` is wrong or an
        unknown non-extension field is present.
        """
        _doc = copy.copy(doc)
        # copy.copy is shallow, so re-attach the ruamel.yaml line/column
        # bookkeeping (`lc`) explicitly when the source node carries it.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        if _doc.get("class") != "ShellCommandRequirement":
            raise ValidationException("Not a ShellCommandRequirement")

        # Keys outside cls.attrs are either namespaced extensions (contain a
        # ":") or invalid; the first invalid plain key stops the scan.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `class`".format(k),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException(
                "Trying 'ShellCommandRequirement'", None, _errors__
            )
        _constructed = cls(
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize this object to a CommentedMap.

        Tries to reproduce the line/column layout of the originally parsed
        document.  NOTE(review): `keys` appears to be the path from the
        document root to this node, `inserted_line_info` the set of output
        lines already occupied, and `shift` the running line offset from
        insertions — inferred from usage here and in `iterate_through_doc` /
        `add_kv`, which are defined elsewhere in this module; confirm there.
        """
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        # Look up the original parsed node for this path so its line/column
        # data can seed the output map.
        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        r["class"] = "ShellCommandRequirement"

        # Emit any fields present in the original document in their original
        # order (this class defines none beyond `class`, so in practice this
        # loop only matters for subclass-like documents carrying extras).
        if doc:
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                # Skip past output lines already taken by
                                # earlier insertions, accumulating the shift.
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a single-element
                                # list, unwrap it and store the bare value.
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

        # Only the document root (top=True) carries $namespaces/$schemas.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class"])
class ResourceRequirement(ProcessRequirement):
    """
    Specify basic hardware resource requirements.

    "min" is the minimum amount of a resource that must be reserved to schedule
    a job. If "min" cannot be satisfied, the job should not be run.

    "max" is the maximum amount of a resource that the job shall be permitted
    to use. If a node has sufficient resources, multiple jobs may be scheduled
    on a single node provided each job's "max" resource requirements are
    met. If a job attempts to exceed its "max" resource allocation, an
    implementation may deny additional resources, which may result in job
    failure.

    If "min" is specified but "max" is not, then "max" == "min"
    If "max" is specified but "min" is not, then "min" == "max".

    It is an error if max < min.

    It is an error if the value of any of these fields is negative.

    If neither "min" nor "max" is specified for a resource, an implementation may provide a default.

    """

    def __init__(
        self,
        coresMin: Optional[Any] = None,
        coresMax: Optional[Any] = None,
        ramMin: Optional[Any] = None,
        ramMax: Optional[Any] = None,
        tmpdirMin: Optional[Any] = None,
        tmpdirMax: Optional[Any] = None,
        outdirMin: Optional[Any] = None,
        outdirMax: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # All eight resource fields are optional; unset fields stay None and
        # are omitted from save() output.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.class_ = "ResourceRequirement"
        self.coresMin = coresMin
        self.coresMax = coresMax
        self.ramMin = ramMin
        self.ramMax = ramMax
        self.tmpdirMin = tmpdirMin
        self.tmpdirMax = tmpdirMax
        self.outdirMin = outdirMin
        self.outdirMax = outdirMax

    def __eq__(self, other: Any) -> bool:
        # Structural equality over the class marker and all eight fields.
        if isinstance(other, ResourceRequirement):
            return bool(
                self.class_ == other.class_
                and self.coresMin == other.coresMin
                and self.coresMax == other.coresMax
                and self.ramMin == other.ramMin
                and self.ramMax == other.ramMax
                and self.tmpdirMin == other.tmpdirMin
                and self.tmpdirMax == other.tmpdirMax
                and self.outdirMin == other.outdirMin
                and self.outdirMax == other.outdirMax
            )
        return False

    def __hash__(self) -> int:
        return hash(
            (
                self.class_,
                self.coresMin,
                self.coresMax,
                self.ramMin,
                self.ramMax,
                self.tmpdirMin,
                self.tmpdirMax,
                self.outdirMin,
                self.outdirMax,
            )
        )

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "ResourceRequirement":
        """Construct a ResourceRequirement from a parsed document node.

        Each resource field is optional; load failures are collected in
        _errors__ rather than raised immediately so that all problems are
        reported together in one ValidationException.
        """
        _doc = copy.copy(doc)
        # copy.copy is shallow, so re-attach the ruamel.yaml line/column
        # bookkeeping (`lc`) explicitly when the source node carries it.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        if _doc.get("class") != "ResourceRequirement":
            raise ValidationException("Not a ResourceRequirement")

        if "coresMin" in _doc:
            try:
                coresMin = load_field(
                    _doc.get("coresMin"),
                    union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'coresMin' field is not valid because:",
                        SourceLine(_doc, "coresMin", str),
                        [e],
                    )
                )
        else:
            coresMin = None
        if "coresMax" in _doc:
            try:
                coresMax = load_field(
                    _doc.get("coresMax"),
                    union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'coresMax' field is not valid because:",
                        SourceLine(_doc, "coresMax", str),
                        [e],
                    )
                )
        else:
            coresMax = None
        if "ramMin" in _doc:
            try:
                ramMin = load_field(
                    _doc.get("ramMin"),
                    union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'ramMin' field is not valid because:",
                        SourceLine(_doc, "ramMin", str),
                        [e],
                    )
                )
        else:
            ramMin = None
        if "ramMax" in _doc:
            try:
                ramMax = load_field(
                    _doc.get("ramMax"),
                    union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'ramMax' field is not valid because:",
                        SourceLine(_doc, "ramMax", str),
                        [e],
                    )
                )
        else:
            ramMax = None
        if "tmpdirMin" in _doc:
            try:
                tmpdirMin = load_field(
                    _doc.get("tmpdirMin"),
                    union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'tmpdirMin' field is not valid because:",
                        SourceLine(_doc, "tmpdirMin", str),
                        [e],
                    )
                )
        else:
            tmpdirMin = None
        if "tmpdirMax" in _doc:
            try:
                tmpdirMax = load_field(
                    _doc.get("tmpdirMax"),
                    union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'tmpdirMax' field is not valid because:",
                        SourceLine(_doc, "tmpdirMax", str),
                        [e],
                    )
                )
        else:
            tmpdirMax = None
        if "outdirMin" in _doc:
            try:
                outdirMin = load_field(
                    _doc.get("outdirMin"),
                    union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'outdirMin' field is not valid because:",
                        SourceLine(_doc, "outdirMin", str),
                        [e],
                    )
                )
        else:
            outdirMin = None
        if "outdirMax" in _doc:
            try:
                outdirMax = load_field(
                    _doc.get("outdirMax"),
                    union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'outdirMax' field is not valid because:",
                        SourceLine(_doc, "outdirMax", str),
                        [e],
                    )
                )
        else:
            outdirMax = None
        # Keys outside cls.attrs are either namespaced extensions (contain a
        # ":") or invalid; the first invalid plain key stops the scan.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'ResourceRequirement'", None, _errors__)
        _constructed = cls(
            coresMin=coresMin,
            coresMax=coresMax,
            ramMin=ramMin,
            ramMax=ramMax,
            tmpdirMin=tmpdirMin,
            tmpdirMax=tmpdirMax,
            outdirMin=outdirMin,
            outdirMax=outdirMax,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize this object to a CommentedMap.

        Tries to reproduce the line/column layout of the originally parsed
        document.  NOTE(review): `keys` appears to be the path from the
        document root to this node, `inserted_line_info` the set of output
        lines already occupied, and `shift` the running line offset from
        insertions — inferred from usage here and in `iterate_through_doc` /
        `add_kv`, which are defined elsewhere in this module; confirm there.
        """
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        # Look up the original parsed node for this path so its line/column
        # data can seed the output map.
        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        r["class"] = "ResourceRequirement"

        # First pass: emit known fields in the order they appeared in the
        # original document, preserving their line numbers where possible.
        if doc:
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                # Skip past output lines already taken by
                                # earlier insertions, accumulating the shift.
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a single-element
                                # list, unwrap it and store the bare value.
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Second pass: emit any field set on the object but absent from the
        # original document (and therefore not handled above).
        if self.coresMin is not None and "coresMin" not in r:
            r["coresMin"] = save(
                self.coresMin,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="coresMin",
                val=r.get("coresMin"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.coresMax is not None and "coresMax" not in r:
            r["coresMax"] = save(
                self.coresMax,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="coresMax",
                val=r.get("coresMax"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.ramMin is not None and "ramMin" not in r:
            r["ramMin"] = save(
                self.ramMin,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="ramMin",
                val=r.get("ramMin"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.ramMax is not None and "ramMax" not in r:
            r["ramMax"] = save(
                self.ramMax,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="ramMax",
                val=r.get("ramMax"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.tmpdirMin is not None and "tmpdirMin" not in r:
            r["tmpdirMin"] = save(
                self.tmpdirMin,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="tmpdirMin",
                val=r.get("tmpdirMin"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.tmpdirMax is not None and "tmpdirMax" not in r:
            r["tmpdirMax"] = save(
                self.tmpdirMax,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="tmpdirMax",
                val=r.get("tmpdirMax"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.outdirMin is not None and "outdirMin" not in r:
            r["outdirMin"] = save(
                self.outdirMin,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="outdirMin",
                val=r.get("outdirMin"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.outdirMax is not None and "outdirMax" not in r:
            r["outdirMax"] = save(
                self.outdirMax,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="outdirMax",
                val=r.get("outdirMax"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # Only the document root (top=True) carries $namespaces/$schemas.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(
        [
            "class",
            "coresMin",
            "coresMax",
            "ramMin",
            "ramMax",
            "tmpdirMin",
            "tmpdirMax",
            "outdirMin",
            "outdirMax",
        ]
    )
self.outputBinding, - self.format, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ExpressionToolOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - raise ValidationException("Missing id") - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - 
else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "type" in _doc: - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - else: - type = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field 
`{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'ExpressionToolOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - outputBinding=outputBinding, - format=format, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in 
inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "outputBinding", - "format", - "type", - ] - ) - - -class ExpressionTool(Process): - """ - Execute an expression as a Workflow step. - - """ - - def __init__( - self, - inputs: Any, - outputs: Any, - expression: Any, - id: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.label = label - self.doc = doc - self.cwlVersion = cwlVersion - self.class_ = "ExpressionTool" - self.expression = expression - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionTool): - return bool( - self.id == other.id - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.label == other.label - and self.doc == other.doc - and 
self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.expression == other.expression - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.label, - self.doc, - self.cwlVersion, - self.class_, - self.expression, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ExpressionTool": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ExpressionTool": - raise ValidationException("Not a ExpressionTool") - - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - try: - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_InputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputs' field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - ) - ) - try: - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_ExpressionToolOutputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputs' field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'cwlVersion' field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - ) - ) - else: - cwlVersion = None - try: - expression = load_field( - _doc.get("expression"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'expression' field is not valid because:", - SourceLine(_doc, "expression", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `expression`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ExpressionTool'", None, _errors__) - _constructed = cls( - id=id, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - label=label, - doc=doc, - cwlVersion=cwlVersion, - expression=expression, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = 
[] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ExpressionTool" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - 
old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.expression is not None and "expression" not in r: - r["expression"] = save( - self.expression, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="expression", - val=r.get("expression"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if 
self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "inputs", - "outputs", - "requirements", - "hints", - "label", - "doc", - "cwlVersion", - "class", - "expression", - ] - ) - - -class WorkflowOutputParameter(OutputParameter): - """ - Describe an output parameter of a workflow. The parameter must be - connected to one or more parameters defined in the workflow that will - provide the value of the output parameter. - - """ - - def __init__( - self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - format: Optional[Any] = None, - outputSource: Optional[Any] = None, - linkMerge: Optional[Any] = None, - type: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.outputBinding = outputBinding - self.format = format - self.outputSource = outputSource - self.linkMerge = linkMerge - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.outputBinding == other.outputBinding - and self.format == other.format - and self.outputSource == other.outputSource - and self.linkMerge == other.linkMerge - and 
self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.outputBinding, - self.format, - self.outputSource, - self.linkMerge, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - raise ValidationException("Missing id") - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - 
loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "outputSource" in _doc: - try: - outputSource = load_field( - _doc.get("outputSource"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputSource' field is not valid because:", - SourceLine(_doc, "outputSource", str), - [e], - ) - ) - else: - outputSource = None - if "linkMerge" in _doc: - try: - linkMerge = load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'linkMerge' field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [e], - ) - ) - else: - linkMerge = None - if "type" in _doc: - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - else: - type = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `outputSource`, `linkMerge`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'WorkflowOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - outputBinding=outputBinding, - format=format, - outputSource=outputSource, - linkMerge=linkMerge, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc 
= iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - 
cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri( - self.outputSource, str(self.id), False, 1, relative_uris - ) - r["outputSource"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputSource", - val=r.get("outputSource"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.linkMerge is not None and "linkMerge" not in r: - r["linkMerge"] = save( - self.linkMerge, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="linkMerge", - val=r.get("linkMerge"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - 
class Sink(Saveable):
    """Marker base class for schema types that can act as data-link sinks.

    Carries no behaviour of its own; subclasses such as WorkflowStepInput
    inherit from it so that sink-typed fields can share one nominal type.
    """

    pass
- - * **merge_nested** - - The input must be an array consisting of exactly one entry for each - input link. If "merge_nested" is specified with a single link, the value - from the link must be wrapped in a single-item list. - - * **merge_flattened** - - 1. The source and sink parameters must be compatible types, or the source - type must be compatible with single element from the "items" type of - the destination array parameter. - 2. Source parameters which are arrays are concatenated. - Source parameters which are single element types are appended as - single elements. - - """ - - def __init__( - self, - id: Any, - source: Optional[Any] = None, - linkMerge: Optional[Any] = None, - default: Optional[Any] = None, - valueFrom: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.source = source - self.linkMerge = linkMerge - self.id = id - self.default = default - self.valueFrom = valueFrom - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepInput): - return bool( - self.source == other.source - and self.linkMerge == other.linkMerge - and self.id == other.id - and self.default == other.default - and self.valueFrom == other.valueFrom - ) - return False - - def __hash__(self) -> int: - return hash( - (self.source, self.linkMerge, self.id, self.default, self.valueFrom) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStepInput": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - 
    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "WorkflowStepInput":
        """Parse a loaded YAML node into a WorkflowStepInput.

        Each known field is loaded through its generated loader; every field
        that fails validation contributes one ValidationException, and all
        collected errors are raised together at the end.  Unknown keys
        containing ':' are kept as extension fields; any other unknown key is
        an error.
        """
        # Shallow-copy so the caller's node is not mutated, but keep the
        # original line/column (lc) bookkeeping for error reporting.
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__: List[ValidationException] = []
        if "id" in _doc:
            try:
                id = load_field(
                    _doc.get("id"),
                    uri_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'id' field is not valid because:",
                        SourceLine(_doc, "id", str),
                        [e],
                    )
                )
        else:
            id = None

        # A missing id may be filled in from docRoot; only an id that was
        # actually present in the document rebases the URI for later fields.
        __original_id_is_none = id is None
        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                raise ValidationException("Missing id")
        if not __original_id_is_none:
            baseuri = id
        if "source" in _doc:
            try:
                source = load_field(
                    _doc.get("source"),
                    uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'source' field is not valid because:",
                        SourceLine(_doc, "source", str),
                        [e],
                    )
                )
        else:
            source = None
        if "linkMerge" in _doc:
            try:
                linkMerge = load_field(
                    _doc.get("linkMerge"),
                    union_of_None_type_or_LinkMergeMethodLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'linkMerge' field is not valid because:",
                        SourceLine(_doc, "linkMerge", str),
                        [e],
                    )
                )
        else:
            linkMerge = None
        if "default" in _doc:
            try:
                default = load_field(
                    _doc.get("default"),
                    union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'default' field is not valid because:",
                        SourceLine(_doc, "default", str),
                        [e],
                    )
                )
        else:
            default = None
        if "valueFrom" in _doc:
            try:
                valueFrom = load_field(
                    _doc.get("valueFrom"),
                    union_of_None_type_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'valueFrom' field is not valid because:",
                        SourceLine(_doc, "valueFrom", str),
                        [e],
                    )
                )
        else:
            valueFrom = None
        # Keys not in cls.attrs: namespaced ("prefix:name") keys become
        # extension fields; anything else is reported and parsing of extra
        # keys stops at the first offender (note the `break`).
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `source`, `linkMerge`, `id`, `default`, `valueFrom`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__)
        _constructed = cls(
            source=source,
            linkMerge=linkMerge,
            id=id,
            default=default,
            valueFrom=valueFrom,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object under its id for later lookups.
        loadingOptions.idx[id] = (_constructed, loadingOptions)
        return _constructed
    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        """Serialize this object to a CommentedMap, copying over line/column
        information from the originally-loaded document where possible.

        `keys` is the path of map keys walked so far; `inserted_line_info`
        maps output line numbers already claimed by earlier insertions, and
        `shift` is the cumulative number of lines entries have been pushed
        down as a result.
        """
        if keys is None:
            keys = []
        r = CommentedMap()

        keys = copy.copy(keys)

        # Re-locate the original YAML node for this key path.
        # NOTE(review): iterate_through_doc presumably walks a module-level
        # copy of the loaded document — helper not shown; confirm.
        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        if doc:
            if self.id:
                # Strip the URI fragment prefix, keeping the text after '#',
                # and descend into the node stored under that short id.
                temp_id = self.id
                if len(temp_id.split('#')) > 1:
                    temp_id = self.id.split("#")[1]
                if temp_id in doc:
                    keys.append(temp_id)
                    temp_doc = doc.get(temp_id)
                    if isinstance(temp_doc, CommentedMap):
                        doc = temp_doc

        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        # NOTE(review): these helpers are called even when doc is None —
        # presumably they tolerate None; confirm against their definitions.
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        # Extension fields are emitted first, optionally with their URIs
        # compacted back to prefixed form.
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        if doc:
            base_url_to_save = base_url
            if self.id:
                base_url_to_save = self.id
            # First pass: emit fields in the order they appear in the
            # original document, so the round-tripped YAML keeps its layout.
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                # Skip over output lines already occupied by
                                # earlier insertions.
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url_to_save,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Second pass: emit any remaining non-None fields that were not
        # present in the original document (and so missed the loop above).
        if self.id is not None and "id" not in r:
            u = save_relative_uri(self.id, base_url, True, None, relative_uris)
            r["id"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="id",
                val=r.get("id"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.source is not None and "source" not in r:
            u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris)
            r["source"] = u
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="source",
                val=r.get("source"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.linkMerge is not None and "linkMerge" not in r:
            r["linkMerge"] = save(
                self.linkMerge,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="linkMerge",
                val=r.get("linkMerge"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.default is not None and "default" not in r:
            r["default"] = save(
                self.default,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="default",
                val=r.get("default"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.valueFrom is not None and "valueFrom" not in r:
            r["valueFrom"] = save(
                self.valueFrom,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="valueFrom",
                val=r.get("valueFrom"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r
    Associate an output parameter of the underlying process with a workflow
    parameter.  The workflow parameter (given in the `id` field) may be used
    as a `source` to connect with input parameters of other workflow steps, or
    with an output parameter of the process.
- - """ - - def __init__( - self, - id: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepOutput): - return bool(self.id == other.id) - return False - - def __hash__(self) -> int: - return hash((self.id)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStepOutput": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - raise ValidationException("Missing id") - if not __original_id_is_none: - baseuri = id - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__) - _constructed = cls( - id=id, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = 
(_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - 
    The `scatter` field specifies one or more input parameters which will be
    scattered.  An input parameter may be listed more than once.  The declared
    type of each input parameter implicitly becomes an array of items of the
    input parameter type.  If a parameter is listed more than once, it becomes
    a nested array.  As a result, upstream parameters which are connected to
    scattered parameters must be arrays.
Each job - in the scatter results in an entry in the output array. - - If any scattered parameter runtime value is an empty array, all outputs are - set to empty arrays and no work is done for the step, according to - applicable scattering rules. - - If `scatter` declares more than one input parameter, `scatterMethod` - describes how to decompose the input into a discrete set of jobs. - - * **dotproduct** specifies that each of the input arrays are aligned and one - element taken from each array to construct each job. It is an error - if all input arrays are not the same length. - - * **nested_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output must be nested arrays for each level of scattering, in the - order that the input arrays are listed in the `scatter` field. - - * **flat_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output arrays must be flattened to a single level, but otherwise listed in the - order that the input arrays are listed in the `scatter` field. - - # Subworkflows - - To specify a nested workflow as part of a workflow step, - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be - specified in the workflow or workflow step requirements. - - It is a fatal error if a workflow directly or indirectly invokes itself as - a subworkflow (recursive workflows are not allowed). 
- - """ - - def __init__( - self, - id: Any, - in_: Any, - out: Any, - run: Any, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - scatter: Optional[Any] = None, - scatterMethod: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.in_ = in_ - self.out = out - self.requirements = requirements - self.hints = hints - self.label = label - self.doc = doc - self.run = run - self.scatter = scatter - self.scatterMethod = scatterMethod - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStep): - return bool( - self.id == other.id - and self.in_ == other.in_ - and self.out == other.out - and self.requirements == other.requirements - and self.hints == other.hints - and self.label == other.label - and self.doc == other.doc - and self.run == other.run - and self.scatter == other.scatter - and self.scatterMethod == other.scatterMethod - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.in_, - self.out, - self.requirements, - self.hints, - self.label, - self.doc, - self.run, - self.scatter, - self.scatterMethod, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStep": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' 
field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - raise ValidationException("Missing id") - if not __original_id_is_none: - baseuri = id - try: - in_ = load_field( - _doc.get("in"), - idmap_in__array_of_WorkflowStepInputLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'in' field is not valid because:", - SourceLine(_doc, "in", str), - [e], - ) - ) - try: - out = load_field( - _doc.get("out"), - uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'out' field is not valid because:", - SourceLine(_doc, "out", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", 
- SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - run = load_field( - _doc.get("run"), - uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'run' field is not valid because:", - SourceLine(_doc, "run", str), - [e], - ) - ) - if "scatter" in _doc: - try: - scatter = load_field( - _doc.get("scatter"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'scatter' field is not valid because:", - SourceLine(_doc, "scatter", str), - [e], - ) - ) - else: - scatter = None - if "scatterMethod" in _doc: - try: - scatterMethod = load_field( - _doc.get("scatterMethod"), - uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'scatterMethod' field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [e], - ) - ) - else: - scatterMethod = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( 
- k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `in`, `out`, `requirements`, `hints`, `label`, `doc`, `run`, `scatter`, `scatterMethod`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkflowStep'", None, _errors__) - _constructed = cls( - id=id, - in_=in_, - out=out, - requirements=requirements, - hints=hints, - label=label, - doc=doc, - run=run, - scatter=scatter, - scatterMethod=scatterMethod, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if 
hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.in_ is not None and "in" not in r: - r["in"] = save( - self.in_, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="in", - val=r.get("in"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.out is not None and "out" not in r: - u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) - r["out"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="out", - val=r.get("out"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.run is not None and "run" not in r: - u = save_relative_uri(self.run, str(self.id), False, None, 
relative_uris) - r["run"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="run", - val=r.get("run"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.scatter is not None and "scatter" not in r: - u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) - r["scatter"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="scatter", - val=r.get("scatter"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri( - self.scatterMethod, str(self.id), False, None, relative_uris - ) - r["scatterMethod"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="scatterMethod", - val=r.get("scatterMethod"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "in", - "out", - "requirements", - "hints", - "label", - "doc", - "run", - "scatter", - "scatterMethod", - ] - ) - - -class Workflow(Process): - """ - A workflow describes a set of **steps** and the **dependencies** between - those steps. When a step produces output that will be consumed by a - second step, the first step is a dependency of the second step. - - When there is a dependency, the workflow engine must execute the preceding - step and wait for it to successfully produce output before executing the - dependent step. 
class Workflow(Process):
    """
    A workflow describes a set of **steps** and the **dependencies** between
    those steps.  When a step produces output consumed by a second step, the
    first step is a dependency of the second: the engine must execute the
    dependency and wait for it to succeed before executing the dependent
    step.  Steps with no direct or indirect dependency between them are
    **independent** and may execute in any order or concurrently.  A
    workflow is complete when all steps have been executed.

    Dependencies between parameters are expressed with the `source` field on
    [workflow step input parameters](#WorkflowStepInput) and [workflow
    output parameters](#WorkflowOutputParameter): a value associated with
    the parameter named by `source` is propagated to the destination
    parameter, and a step is ready to execute once every inbound data link
    is fulfilled.

    ## Workflow success and failure

    A completed step ends in one of `success`, `temporaryFailure` or
    `permanentFailure`.  An implementation may retry a step that ended in
    `temporaryFailure`, and may either continue running other steps or stop
    immediately upon `permanentFailure`.

    * If any step results in `permanentFailure`, the workflow status is
      `permanentFailure`.
    * If one or more steps result in `temporaryFailure` and all other steps
      complete `success` or are not executed, the workflow status is
      `temporaryFailure`.
    * If all steps are executed and complete with `success`, the workflow
      status is `success`.

    # Extensions

    [ScatterFeatureRequirement](#ScatterFeatureRequirement) and
    [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are
    available as standard [extensions](#Extensions_and_Metadata) to core
    workflow semantics.
    """

    def __init__(
        self,
        inputs: Any,
        outputs: Any,
        steps: Any,
        id: Optional[Any] = None,
        requirements: Optional[Any] = None,
        hints: Optional[Any] = None,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        cwlVersion: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh containers/options when none were supplied.
        self.extension_fields = extension_fields if extension_fields else CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.id = id
        self.inputs = inputs
        self.outputs = outputs
        self.requirements = requirements
        self.hints = hints
        self.label = label
        self.doc = doc
        self.cwlVersion = cwlVersion
        self.class_ = "Workflow"
        self.steps = steps

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Workflow):
            return False
        return bool(
            self.id == other.id
            and self.inputs == other.inputs
            and self.outputs == other.outputs
            and self.requirements == other.requirements
            and self.hints == other.hints
            and self.label == other.label
            and self.doc == other.doc
            and self.cwlVersion == other.cwlVersion
            and self.class_ == other.class_
            and self.steps == other.steps
        )

    def __hash__(self) -> int:
        return hash(
            (
                self.id,
                self.inputs,
                self.outputs,
                self.requirements,
                self.hints,
                self.label,
                self.doc,
                self.cwlVersion,
                self.class_,
                self.steps,
            )
        )

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "Workflow":
        """Parse a (possibly commented) mapping into a ``Workflow``.

        Raises ``ValidationException`` collecting every per-field failure.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel line/column bookkeeping on the shallow copy.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        if _doc.get("class") != "Workflow":
            raise ValidationException("Not a Workflow")

        if "id" in _doc:
            try:
                id = load_field(
                    _doc.get("id"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'id' field is not valid because:",
                        SourceLine(_doc, "id", str),
                        [e],
                    )
                )
        else:
            id = None

        # A missing id is synthesized (docRoot or a fresh blank-node UUID);
        # only an explicitly provided id rebases relative URIs below.
        __original_id_is_none = id is None
        if id is None:
            id = docRoot if docRoot is not None else "_:" + str(_uuid__.uuid4())
        if not __original_id_is_none:
            baseuri = id
        try:
            inputs = load_field(
                _doc.get("inputs"),
                idmap_inputs_array_of_InputParameterLoader,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'inputs' field is not valid because:",
                    SourceLine(_doc, "inputs", str),
                    [e],
                )
            )
        try:
            outputs = load_field(
                _doc.get("outputs"),
                idmap_outputs_array_of_WorkflowOutputParameterLoader,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'outputs' field is not valid because:",
                    SourceLine(_doc, "outputs", str),
                    [e],
                )
            )
        if "requirements" in _doc:
            try:
                requirements = load_field(
                    _doc.get("requirements"),
                    idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'requirements' field is not valid because:",
                        SourceLine(_doc, "requirements", str),
                        [e],
                    )
                )
        else:
            requirements = None
        if "hints" in _doc:
            try:
                hints = load_field(
                    _doc.get("hints"),
                    idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'hints' field is not valid because:",
                        SourceLine(_doc, "hints", str),
                        [e],
                    )
                )
        else:
            hints = None
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'label' field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'doc' field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        if "cwlVersion" in _doc:
            try:
                cwlVersion = load_field(
                    _doc.get("cwlVersion"),
                    uri_union_of_None_type_or_CWLVersionLoader_False_True_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'cwlVersion' field is not valid because:",
                        SourceLine(_doc, "cwlVersion", str),
                        [e],
                    )
                )
        else:
            cwlVersion = None
        try:
            steps = load_field(
                _doc.get("steps"),
                idmap_steps_union_of_array_of_WorkflowStepLoader,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the 'steps' field is not valid because:",
                    SourceLine(_doc, "steps", str),
                    [e],
                )
            )
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k in cls.attrs:
                continue
            if ":" in k:
                # Namespaced keys become extension fields.
                ex = expand_url(
                    k, "", loadingOptions, scoped_id=False, vocab_term=False
                )
                extension_fields[ex] = _doc[k]
            else:
                _errors__.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `steps`".format(
                            k
                        ),
                        SourceLine(_doc, k, str),
                    )
                )
                break

        if _errors__:
            raise ValidationException("Trying 'Workflow'", None, _errors__)
        _constructed = cls(
            id=id,
            inputs=inputs,
            outputs=outputs,
            requirements=requirements,
            hints=hints,
            label=label,
            doc=doc,
            cwlVersion=cwlVersion,
            steps=steps,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        loadingOptions.idx[id] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0,
    ) -> CommentedMap:
        """Serialize back to a ``CommentedMap``, restoring original
        line/column positions (tracked via ``inserted_line_info``/``shift``)
        where the source document provides them.
        """
        r = CommentedMap()
        keys = copy.copy(keys) if keys is not None else []
        doc = iterate_through_doc(keys)
        if inserted_line_info is None:
            inserted_line_info = {}

        if doc:
            if self.id:
                # Descend into the sub-document matching this node's id
                # fragment, when one is present.
                temp_id = self.id
                if len(temp_id.split("#")) > 1:
                    temp_id = self.id.split("#")[1]
                if temp_id in doc:
                    keys.append(temp_id)
                    temp_doc = doc.get(temp_id)
                    if isinstance(temp_doc, CommentedMap):
                        doc = temp_doc

        if doc is not None:
            # Mirror the source document's position onto the output map.
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        for ef in self.extension_fields:
            out_key = prefix_url(ef, self.loadingOptions.vocab) if relative_uris else ef
            r[out_key] = self.extension_fields[ef]

        r["class"] = "Workflow"

        def _record(field: str) -> None:
            # Register `field`'s position in the output via add_kv, keeping
            # the running max_len / inserted_line_info state current.
            nonlocal max_len, inserted_line_info
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key=field,
                val=r.get(field),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        if doc:
            base_url_to_save = base_url
            if self.id:
                base_url_to_save = self.id
            # First pass: emit fields in the order they appear in the
            # source document so round-tripping preserves layout.
            for key in doc.lc.data.keys():
                if not isinstance(key, str):
                    continue
                if not hasattr(self, key) or getattr(self, key) is None or key == "class":
                    continue
                line = doc.lc.data[key][0] + shift
                if inserted_line_info:
                    # Slide past lines already claimed by earlier insertions.
                    while line in inserted_line_info:
                        line += 1
                        shift += 1
                saved_val = save(
                    getattr(self, key),
                    top=False,
                    base_url=base_url_to_save,
                    relative_uris=relative_uris,
                    keys=keys + [key],
                    inserted_line_info=inserted_line_info,
                    shift=shift,
                )
                # Unwrap single-element lists so scalars round-trip as scalars.
                if type(saved_val) == list and len(saved_val) == 1:
                    saved_val = saved_val[0]
                r[key] = saved_val
                _record(key)
        # Second pass: any remaining fields not present in the source doc.
        if self.id is not None and "id" not in r:
            r["id"] = save_relative_uri(self.id, base_url, True, None, relative_uris)
            _record("id")
        if self.inputs is not None and "inputs" not in r:
            r["inputs"] = save(
                self.inputs,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            _record("inputs")
        if self.outputs is not None and "outputs" not in r:
            r["outputs"] = save(
                self.outputs,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            _record("outputs")
        if self.requirements is not None and "requirements" not in r:
            r["requirements"] = save(
                self.requirements,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            _record("requirements")
        if self.hints is not None and "hints" not in r:
            r["hints"] = save(
                self.hints,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            _record("hints")
        if self.label is not None and "label" not in r:
            r["label"] = save(
                self.label,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            _record("label")
        if self.doc is not None and "doc" not in r:
            r["doc"] = save(
                self.doc,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            _record("doc")
        if self.cwlVersion is not None and "cwlVersion" not in r:
            r["cwlVersion"] = save_relative_uri(
                self.cwlVersion, str(self.id), False, None, relative_uris
            )
            _record("cwlVersion")
        if self.steps is not None and "steps" not in r:
            r["steps"] = save(
                self.steps,
                top=False,
                base_url=str(self.id),
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            _record("steps")

        # `top` marks the document root: attach $namespaces/$schemas there.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(
        [
            "id",
            "inputs",
            "outputs",
            "requirements",
            "hints",
            "label",
            "doc",
            "cwlVersion",
            "class",
            "steps",
        ]
    )
add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.steps is not None and "steps" not in r: - r["steps"] = save( - self.steps, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="steps", - val=r.get("steps"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "inputs", - "outputs", - "requirements", - "hints", - "label", - "doc", - "cwlVersion", - "class", - "steps", - ] - ) - - -class SubworkflowFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support nested workflows in - the `run` field of [WorkflowStep](#WorkflowStep). 
- - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SubworkflowFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SubworkflowFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SubworkflowFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SubworkflowFeatureRequirement": - raise ValidationException("Not a SubworkflowFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'SubworkflowFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = 
iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SubworkflowFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ScatterFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support the `scatter` and - `scatterMethod` fields of [WorkflowStep](#WorkflowStep). 
- - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ScatterFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ScatterFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ScatterFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ScatterFeatureRequirement": - raise ValidationException("Not a ScatterFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'ScatterFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if 
inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ScatterFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class MultipleInputFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support multiple inbound data links - listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). 
- - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "MultipleInputFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, MultipleInputFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "MultipleInputFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "MultipleInputFeatureRequirement": - raise ValidationException("Not a MultipleInputFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'MultipleInputFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc 
= iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "MultipleInputFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class StepInputExpressionRequirement(ProcessRequirement): - """ - Indicate that the workflow platform must support the `valueFrom` field - of [WorkflowStepInput](#WorkflowStepInput). 
- - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "StepInputExpressionRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, StepInputExpressionRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "StepInputExpressionRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "StepInputExpressionRequirement": - raise ValidationException("Not a StepInputExpressionRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'StepInputExpressionRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = 
iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "StepInputExpressionRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -_vocab = { - "Any": "https://w3id.org/cwl/salad#Any", - "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", - "CWLType": "https://w3id.org/cwl/cwl#CWLType", - "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", - "CommandInputArraySchema": 
"https://w3id.org/cwl/cwl#CommandInputArraySchema", - "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", - "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter", - "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", - "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", - "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", - "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", - "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", - "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", - "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema", - "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", - "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", - "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", - "Directory": "https://w3id.org/cwl/cwl#Directory", - "Dirent": "https://w3id.org/cwl/cwl#Dirent", - "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", - "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", - "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", - "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", - "Expression": "https://w3id.org/cwl/cwl#Expression", - "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", - "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", - "ExpressionToolOutputParameter": "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", - "File": "https://w3id.org/cwl/cwl#File", - "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", - "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", - "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", - "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", - 
"InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", - "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", - "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField", - "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", - "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", - "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", - "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", - "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", - "OutputBinding": "https://w3id.org/cwl/cwl#OutputBinding", - "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", - "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", - "OutputRecordField": "https://w3id.org/cwl/cwl#OutputRecordField", - "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", - "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", - "Parameter": "https://w3id.org/cwl/cwl#Parameter", - "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", - "Process": "https://w3id.org/cwl/cwl#Process", - "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", - "RecordField": "https://w3id.org/cwl/salad#RecordField", - "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", - "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", - "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", - "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", - "SchemaBase": "https://w3id.org/cwl/cwl#SchemaBase", - "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", - "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", - "Sink": "https://w3id.org/cwl/cwl#Sink", - "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", - "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", - "StepInputExpressionRequirement": 
"https://w3id.org/cwl/cwl#StepInputExpressionRequirement", - "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", - "Workflow": "https://w3id.org/cwl/cwl#Workflow", - "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", - "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", - "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", - "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", - "array": "https://w3id.org/cwl/salad#array", - "boolean": "http://www.w3.org/2001/XMLSchema#boolean", - "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", - "double": "http://www.w3.org/2001/XMLSchema#double", - "draft-2": "https://w3id.org/cwl/cwl#draft-2", - "draft-3": "https://w3id.org/cwl/cwl#draft-3", - "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", - "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", - "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", - "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", - "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", - "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", - "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", - "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", - "enum": "https://w3id.org/cwl/salad#enum", - "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", - "float": "http://www.w3.org/2001/XMLSchema#float", - "int": "http://www.w3.org/2001/XMLSchema#int", - "long": "http://www.w3.org/2001/XMLSchema#long", - "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", - "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", - "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", - "null": "https://w3id.org/cwl/salad#null", - "record": "https://w3id.org/cwl/salad#record", - "stderr": "https://w3id.org/cwl/cwl#stderr", - "stdout": "https://w3id.org/cwl/cwl#stdout", - 
"string": "http://www.w3.org/2001/XMLSchema#string", - "v1.0": "https://w3id.org/cwl/cwl#v1.0", - "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", -} -_rvocab = { - "https://w3id.org/cwl/salad#Any": "Any", - "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", - "https://w3id.org/cwl/cwl#CWLType": "CWLType", - "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", - "https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", - "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", - "https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", - "https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", - "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", - "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", - "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", - "https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", - "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", - "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", - "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", - "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", - "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", - "https://w3id.org/cwl/cwl#Directory": "Directory", - "https://w3id.org/cwl/cwl#Dirent": "Dirent", - "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", - "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", - "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", - "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", - "https://w3id.org/cwl/cwl#Expression": "Expression", - "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", - "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", - 
"https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", - "https://w3id.org/cwl/cwl#File": "File", - "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", - "https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", - "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", - "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", - "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", - "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", - "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", - "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", - "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", - "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", - "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", - "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", - "https://w3id.org/cwl/cwl#OutputBinding": "OutputBinding", - "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", - "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", - "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", - "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", - "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", - "https://w3id.org/cwl/cwl#Parameter": "Parameter", - "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", - "https://w3id.org/cwl/cwl#Process": "Process", - "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", - "https://w3id.org/cwl/salad#RecordField": "RecordField", - "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", - "https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", - "https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", - "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", - "https://w3id.org/cwl/cwl#SchemaBase": 
"SchemaBase", - "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", - "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", - "https://w3id.org/cwl/cwl#Sink": "Sink", - "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", - "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", - "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", - "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", - "https://w3id.org/cwl/cwl#Workflow": "Workflow", - "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", - "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", - "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", - "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", - "https://w3id.org/cwl/salad#array": "array", - "http://www.w3.org/2001/XMLSchema#boolean": "boolean", - "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", - "http://www.w3.org/2001/XMLSchema#double": "double", - "https://w3id.org/cwl/cwl#draft-2": "draft-2", - "https://w3id.org/cwl/cwl#draft-3": "draft-3", - "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", - "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", - "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", - "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", - "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", - "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", - "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", - "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", - "https://w3id.org/cwl/salad#enum": "enum", - "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", - "http://www.w3.org/2001/XMLSchema#float": "float", - "http://www.w3.org/2001/XMLSchema#int": "int", - "http://www.w3.org/2001/XMLSchema#long": "long", - 
"https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", - "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", - "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", - "https://w3id.org/cwl/salad#null": "null", - "https://w3id.org/cwl/salad#record": "record", - "https://w3id.org/cwl/cwl#stderr": "stderr", - "https://w3id.org/cwl/cwl#stdout": "stdout", - "http://www.w3.org/2001/XMLSchema#string": "string", - "https://w3id.org/cwl/cwl#v1.0": "v1.0", - "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", -} - -strtype = _PrimitiveLoader(str) -inttype = _PrimitiveLoader(int) -floattype = _PrimitiveLoader(float) -booltype = _PrimitiveLoader(bool) -None_type = _PrimitiveLoader(type(None)) -Any_type = _AnyLoader() -PrimitiveTypeLoader = _EnumLoader( - ( - "null", - "boolean", - "int", - "long", - "float", - "double", - "string", - ), - "PrimitiveType", -) -""" -Salad data types are based on Avro schema declarations. Refer to the -[Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for -detailed information. - -null: no value -boolean: a binary value -int: 32-bit signed integer -long: 64-bit signed integer -float: single precision (32-bit) IEEE 754 floating-point number -double: double precision (64-bit) IEEE 754 floating-point number -string: Unicode character sequence -""" -AnyLoader = _EnumLoader(("Any",), "Any") -""" -The **Any** type validates for any non-null value. 
-""" -RecordFieldLoader = _RecordLoader(RecordField) -RecordSchemaLoader = _RecordLoader(RecordSchema) -EnumSchemaLoader = _RecordLoader(EnumSchema) -ArraySchemaLoader = _RecordLoader(ArraySchema) -CWLVersionLoader = _EnumLoader( - ( - "draft-2", - "draft-3.dev1", - "draft-3.dev2", - "draft-3.dev3", - "draft-3.dev4", - "draft-3.dev5", - "draft-3", - "draft-4.dev1", - "draft-4.dev2", - "draft-4.dev3", - "v1.0.dev4", - "v1.0", - ), - "CWLVersion", -) -""" -Version symbols for published CWL document versions. -""" -CWLTypeLoader = _EnumLoader( - ( - "null", - "boolean", - "int", - "long", - "float", - "double", - "string", - "File", - "Directory", - ), - "CWLType", -) -""" -Extends primitive types with the concept of a file and directory as a builtin type. -File: A File object -Directory: A Directory object -""" -FileLoader = _RecordLoader(File) -DirectoryLoader = _RecordLoader(Directory) -ExpressionLoader = _ExpressionLoader(str) -InputRecordFieldLoader = _RecordLoader(InputRecordField) -InputRecordSchemaLoader = _RecordLoader(InputRecordSchema) -InputEnumSchemaLoader = _RecordLoader(InputEnumSchema) -InputArraySchemaLoader = _RecordLoader(InputArraySchema) -OutputRecordFieldLoader = _RecordLoader(OutputRecordField) -OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema) -OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema) -OutputArraySchemaLoader = _RecordLoader(OutputArraySchema) -InputParameterLoader = _RecordLoader(InputParameter) -OutputParameterLoader = _RecordLoader(OutputParameter) -InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement) -SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement) -EnvironmentDefLoader = _RecordLoader(EnvironmentDef) -CommandLineBindingLoader = _RecordLoader(CommandLineBinding) -CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding) -CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField) -CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema) 
-CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema) -CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema) -CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField) -CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema) -CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema) -CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema) -CommandInputParameterLoader = _RecordLoader(CommandInputParameter) -CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) -stdoutLoader = _EnumLoader(("stdout",), "stdout") -""" -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. - -The following -``` -outputs: - an_output_name: - type: stdout - -stdout: a_stdout_file -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stdout_file - -stdout: a_stdout_file -``` - -If there is no `stdout` name provided, a random filename will be created. -For example, the following -``` -outputs: - an_output_name: - type: stdout -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stdout_filenameABCDEFG - -stdout: random_stdout_filenameABCDEFG -``` -""" -stderrLoader = _EnumLoader(("stderr",), "stderr") -""" -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. - -The following -``` -outputs: - an_output_name: - type: stderr - -stderr: a_stderr_file -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stderr_file - -stderr: a_stderr_file -``` - -If there is no `stderr` name provided, a random filename will be created. 
-For example, the following -``` -outputs: - an_output_name: - type: stderr -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stderr_filenameABCDEFG - -stderr: random_stderr_filenameABCDEFG -``` -""" -CommandLineToolLoader = _RecordLoader(CommandLineTool) -DockerRequirementLoader = _RecordLoader(DockerRequirement) -SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) -SoftwarePackageLoader = _RecordLoader(SoftwarePackage) -DirentLoader = _RecordLoader(Dirent) -InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement) -EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement) -ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement) -ResourceRequirementLoader = _RecordLoader(ResourceRequirement) -ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter) -ExpressionToolLoader = _RecordLoader(ExpressionTool) -LinkMergeMethodLoader = _EnumLoader( - ( - "merge_nested", - "merge_flattened", - ), - "LinkMergeMethod", -) -""" -The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). -""" -WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) -WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) -WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) -ScatterMethodLoader = _EnumLoader( - ( - "dotproduct", - "nested_crossproduct", - "flat_crossproduct", - ), - "ScatterMethod", -) -""" -The scatter method, as described in [workflow step scatter](#WorkflowStep). 
-""" -WorkflowStepLoader = _RecordLoader(WorkflowStep) -WorkflowLoader = _RecordLoader(Workflow) -SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) -ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement) -MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement) -StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement) -uri_strtype_True_False_None = _URILoader(strtype, True, False, None) -union_of_None_type_or_strtype = _UnionLoader( - ( - None_type, - strtype, - ) -) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( - ( - PrimitiveTypeLoader, - RecordSchemaLoader, - EnumSchemaLoader, - ArraySchemaLoader, - strtype, - ) -) -array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype -) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( - ( - PrimitiveTypeLoader, - RecordSchemaLoader, - EnumSchemaLoader, - ArraySchemaLoader, - strtype, - array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, - 2, -) 
-array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) -union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_RecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" -) -Record_symbolLoader = _EnumLoader(("record",), "Record_symbol") -typedsl_Record_symbolLoader_2 = _TypeDSLLoader(Record_symbolLoader, 2) -array_of_strtype = _ArrayLoader(strtype) -uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) -Enum_symbolLoader = _EnumLoader(("enum",), "Enum_symbol") -typedsl_Enum_symbolLoader_2 = _TypeDSLLoader(Enum_symbolLoader, 2) -Array_symbolLoader = _EnumLoader(("array",), "Array_symbol") -typedsl_Array_symbolLoader_2 = _TypeDSLLoader(Array_symbolLoader, 2) -File_classLoader = _EnumLoader(("File",), "File_class") -uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None) -uri_union_of_None_type_or_strtype_False_False_None = _URILoader( - union_of_None_type_or_strtype, False, False, None -) -union_of_None_type_or_inttype = _UnionLoader( - ( - None_type, - inttype, - ) -) -union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - FileLoader, - DirectoryLoader, - ) -) -array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( - union_of_FileLoader_or_DirectoryLoader -) -union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - None_type, - array_of_union_of_FileLoader_or_DirectoryLoader, - ) -) -uri_union_of_None_type_or_strtype_True_False_None = _URILoader( - union_of_None_type_or_strtype, True, False, None -) -Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader( - Directory_classLoader, False, True, None -) -union_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - strtype, - ExpressionLoader, - ) -) 
-array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader( - union_of_strtype_or_ExpressionLoader -) -union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - array_of_union_of_strtype_or_ExpressionLoader, - ) -) -union_of_None_type_or_booltype = _UnionLoader( - ( - None_type, - booltype, - ) -) -union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ) -) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - 
union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - 2, -) -union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - CommandLineBindingLoader, - ) -) -array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) -union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_InputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" -) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - OutputRecordSchemaLoader, - OutputEnumSchemaLoader, - OutputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - OutputRecordSchemaLoader, - OutputEnumSchemaLoader, - OutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = 
_TypeDSLLoader( - union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - 2, -) -union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( - ( - None_type, - CommandOutputBindingLoader, - ) -) -array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) -union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_OutputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" -) -union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ExpressionLoader, - ) -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, -) -union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( - ( - None_type, - FileLoader, - DirectoryLoader, - Any_type, - ) -) -union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( - ( - None_type, - CWLTypeLoader, - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - ) -) 
-typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - 2, -) -union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - ) -) -uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None -) -array_of_InputParameterLoader = _ArrayLoader(InputParameterLoader) -idmap_inputs_array_of_InputParameterLoader = _IdMapLoader( - array_of_InputParameterLoader, "id", "type" -) -array_of_OutputParameterLoader = _ArrayLoader(OutputParameterLoader) -idmap_outputs_array_of_OutputParameterLoader = _IdMapLoader( - array_of_OutputParameterLoader, "id", "type" -) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( - ( - InlineJavascriptRequirementLoader, - SchemaDefRequirementLoader, - DockerRequirementLoader, - SoftwareRequirementLoader, - InitialWorkDirRequirementLoader, - EnvVarRequirementLoader, - ShellCommandRequirementLoader, - ResourceRequirementLoader, - SubworkflowFeatureRequirementLoader, - ScatterFeatureRequirementLoader, - MultipleInputFeatureRequirementLoader, - 
StepInputExpressionRequirementLoader, - ) -) -array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader -) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( - ( - None_type, - array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - ) -) 
-idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader( - union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - "class", - "None", -) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( - ( - InlineJavascriptRequirementLoader, - SchemaDefRequirementLoader, - DockerRequirementLoader, - SoftwareRequirementLoader, - InitialWorkDirRequirementLoader, - EnvVarRequirementLoader, - ShellCommandRequirementLoader, - ResourceRequirementLoader, - SubworkflowFeatureRequirementLoader, - ScatterFeatureRequirementLoader, - MultipleInputFeatureRequirementLoader, - StepInputExpressionRequirementLoader, - Any_type, - ) -) 
-array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type -) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( - ( - None_type, - array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - ) -) 
-idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( - union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - "class", - "None", -) -union_of_None_type_or_CWLVersionLoader = _UnionLoader( - ( - None_type, - CWLVersionLoader, - ) -) -uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader( - union_of_None_type_or_CWLVersionLoader, False, True, None -) -InlineJavascriptRequirement_classLoader = _EnumLoader( - ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" -) -uri_InlineJavascriptRequirement_classLoader_False_True_None = _URILoader( - InlineJavascriptRequirement_classLoader, False, True, None -) -union_of_None_type_or_array_of_strtype = _UnionLoader( - ( - None_type, - array_of_strtype, - ) -) -SchemaDefRequirement_classLoader = _EnumLoader( - ("SchemaDefRequirement",), "SchemaDefRequirement_class" -) -uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( - SchemaDefRequirement_classLoader, False, True, None -) -union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( - _UnionLoader( - ( - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - ) - ) -) 
-array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _ArrayLoader( - union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader -) -union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - array_of_strtype, - ) -) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - 
union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - 2, -) -array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) -union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_CommandInputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" - ) -) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - ) -) 
-typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - 2, -) -array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) -union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_CommandOutputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" - ) -) -union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - None_type, - CWLTypeLoader, - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - 
union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - 2, -) -union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - None_type, - CWLTypeLoader, - stdoutLoader, - stderrLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - 2, -) -CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") -uri_CommandLineTool_classLoader_False_True_None = _URILoader( - CommandLineTool_classLoader, False, True, None -) -array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) 
-idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( - array_of_CommandInputParameterLoader, "id", "type" -) -array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) -idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( - array_of_CommandOutputParameterLoader, "id", "type" -) -union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - strtype, - ExpressionLoader, - CommandLineBindingLoader, - ) -) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) -) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - ) -) -array_of_inttype = _ArrayLoader(inttype) -union_of_None_type_or_array_of_inttype = _UnionLoader( - ( - None_type, - array_of_inttype, - ) -) -DockerRequirement_classLoader = _EnumLoader( - ("DockerRequirement",), "DockerRequirement_class" -) -uri_DockerRequirement_classLoader_False_True_None = _URILoader( - DockerRequirement_classLoader, False, True, None -) -SoftwareRequirement_classLoader = _EnumLoader( - ("SoftwareRequirement",), "SoftwareRequirement_class" -) -uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( - SoftwareRequirement_classLoader, False, True, None -) -array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) -idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( - array_of_SoftwarePackageLoader, "package", "specs" -) -uri_union_of_None_type_or_array_of_strtype_False_False_None = _URILoader( - union_of_None_type_or_array_of_strtype, False, False, None -) -InitialWorkDirRequirement_classLoader = _EnumLoader( - ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" -) -uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( - 
InitialWorkDirRequirement_classLoader, False, True, None -) -union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _UnionLoader( - ( - FileLoader, - DirectoryLoader, - DirentLoader, - strtype, - ExpressionLoader, - ) -) -array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _ArrayLoader( - union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader -) -union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader = _UnionLoader( - ( - array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader, - strtype, - ExpressionLoader, - ) -) -EnvVarRequirement_classLoader = _EnumLoader( - ("EnvVarRequirement",), "EnvVarRequirement_class" -) -uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( - EnvVarRequirement_classLoader, False, True, None -) -array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) -idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( - array_of_EnvironmentDefLoader, "envName", "envValue" -) -ShellCommandRequirement_classLoader = _EnumLoader( - ("ShellCommandRequirement",), "ShellCommandRequirement_class" -) -uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( - ShellCommandRequirement_classLoader, False, True, None -) -ResourceRequirement_classLoader = _EnumLoader( - ("ResourceRequirement",), "ResourceRequirement_class" -) -uri_ResourceRequirement_classLoader_False_True_None = _URILoader( - ResourceRequirement_classLoader, False, True, None -) -union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - inttype, - strtype, - ExpressionLoader, - ) -) 
-union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - None_type, - CWLTypeLoader, - OutputRecordSchemaLoader, - OutputEnumSchemaLoader, - OutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - 2, -) -ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") -uri_ExpressionTool_classLoader_False_True_None = _URILoader( - ExpressionTool_classLoader, False, True, None -) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( - ExpressionToolOutputParameterLoader -) -idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( - array_of_ExpressionToolOutputParameterLoader, "id", "type" -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1 -) -union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( - ( - None_type, - LinkMergeMethodLoader, - ) -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2 -) -array_of_WorkflowStepInputLoader = 
_ArrayLoader(WorkflowStepInputLoader) -idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( - array_of_WorkflowStepInputLoader, "id", "source" -) -union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( - ( - strtype, - WorkflowStepOutputLoader, - ) -) -array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( - union_of_strtype_or_WorkflowStepOutputLoader -) -union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( - (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) -) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( - _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, - ) -) -array_of_Any_type = _ArrayLoader(Any_type) -union_of_None_type_or_array_of_Any_type = _UnionLoader( - ( - None_type, - array_of_Any_type, - ) -) -idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( - union_of_None_type_or_array_of_Any_type, "class", "None" -) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ) - ) -) -uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( - union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - False, - False, - None, -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0 -) -union_of_None_type_or_ScatterMethodLoader = _UnionLoader( - ( - None_type, - ScatterMethodLoader, - ) -) -uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader( - union_of_None_type_or_ScatterMethodLoader, False, True, None -) -Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader( - Workflow_classLoader, False, 
True, None -) -array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) -idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( - array_of_WorkflowOutputParameterLoader, "id", "type" -) -array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) -union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) -idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( - union_of_array_of_WorkflowStepLoader, "id", "None" -) -SubworkflowFeatureRequirement_classLoader = _EnumLoader( - ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" -) -uri_SubworkflowFeatureRequirement_classLoader_False_True_None = _URILoader( - SubworkflowFeatureRequirement_classLoader, False, True, None -) -ScatterFeatureRequirement_classLoader = _EnumLoader( - ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" -) -uri_ScatterFeatureRequirement_classLoader_False_True_None = _URILoader( - ScatterFeatureRequirement_classLoader, False, True, None -) -MultipleInputFeatureRequirement_classLoader = _EnumLoader( - ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" -) -uri_MultipleInputFeatureRequirement_classLoader_False_True_None = _URILoader( - MultipleInputFeatureRequirement_classLoader, False, True, None -) -StepInputExpressionRequirement_classLoader = _EnumLoader( - ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" -) -uri_StepInputExpressionRequirement_classLoader_False_True_None = _URILoader( - StepInputExpressionRequirement_classLoader, False, True, None -) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( - ( - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ) -) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader - ) -) 
-union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( - ( - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - ) -) - - -def load_document( - doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - if baseuri is None: - baseuri = file_uri(os.getcwd()) + "/" - if loadingOptions is None: - loadingOptions = LoadingOptions() - result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - doc, - baseuri, - loadingOptions, - ) - return result - - -def load_document_with_metadata( - doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Any: - if baseuri is None: - baseuri = file_uri(os.getcwd()) + "/" - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=baseuri) - return _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - doc, - baseuri, - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - -def load_document_by_string( - string: Any, - uri: str, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - yaml = yaml_no_ts() - result = yaml.load(string) - add_lc_filename(result, uri) - - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=uri) - - result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - result, - uri, - loadingOptions, - ) - 
return result - - -def load_document_by_yaml( - yaml: Any, - uri: str, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - """ - Shortcut to load via a YAML object. - yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True - """ - add_lc_filename(yaml, uri) - - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=uri) - - result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - yaml, - uri, - loadingOptions, - ) - return result diff --git a/schema_salad/tests/cwl_v1_1.py b/schema_salad/tests/cwl_v1_1.py deleted file mode 100644 index 816dd0e97..000000000 --- a/schema_salad/tests/cwl_v1_1.py +++ /dev/null @@ -1,24106 +0,0 @@ -# -# This file was autogenerated using schema-salad-tool --codegen=python -# The code itself is released under the Apache 2.0 license and the help text is -# subject to the license of the original schema. 
-import copy -import logging -import os -import pathlib -import re -import tempfile -import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 -import xml.sax # nosec -from abc import ABC, abstractmethod -from io import StringIO -from typing import ( - Any, - Dict, - List, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, - no_type_check, -) -from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit -from urllib.request import pathname2url - -from rdflib import Graph -from rdflib.plugins.parsers.notation3 import BadSyntax -from ruamel.yaml.comments import CommentedMap, CommentedSeq - -from schema_salad.exceptions import SchemaSaladException, ValidationException -from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher -from schema_salad.sourceline import SourceLine, add_lc_filename -from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ - -_vocab: Dict[str, str] = {} -_rvocab: Dict[str, str] = {} - -_logger = logging.getLogger("salad") - - -IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] - -doc_line_info = CommentedMap() - - -class LoadingOptions: - idx: IdxType - fileuri: Optional[str] - baseuri: str - namespaces: MutableMapping[str, str] - schemas: MutableSequence[str] - original_doc: Optional[Any] - addl_metadata: MutableMapping[str, Any] - fetcher: Fetcher - vocab: Dict[str, str] - rvocab: Dict[str, str] - cache: CacheType - imports: List[str] - includes: List[str] - - def __init__( - self, - fetcher: Optional[Fetcher] = None, - namespaces: Optional[Dict[str, str]] = None, - schemas: Optional[List[str]] = None, - fileuri: Optional[str] = None, - copyfrom: Optional["LoadingOptions"] = None, - original_doc: Optional[Any] = None, - addl_metadata: Optional[Dict[str, str]] = None, - baseuri: Optional[str] = None, - idx: Optional[IdxType] = None, - imports: Optional[List[str]] = None, - includes: Optional[List[str]] = None, - ) -> None: 
- """Create a LoadingOptions object.""" - self.original_doc = original_doc - - if idx is not None: - self.idx = idx - else: - self.idx = copyfrom.idx if copyfrom is not None else {} - - if fileuri is not None: - self.fileuri = fileuri - else: - self.fileuri = copyfrom.fileuri if copyfrom is not None else None - - if baseuri is not None: - self.baseuri = baseuri - else: - self.baseuri = copyfrom.baseuri if copyfrom is not None else "" - - if namespaces is not None: - self.namespaces = namespaces - else: - self.namespaces = copyfrom.namespaces if copyfrom is not None else {} - - if schemas is not None: - self.schemas = schemas - else: - self.schemas = copyfrom.schemas if copyfrom is not None else [] - - if addl_metadata is not None: - self.addl_metadata = addl_metadata - else: - self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} - - if imports is not None: - self.imports = imports - else: - self.imports = copyfrom.imports if copyfrom is not None else [] - - if includes is not None: - self.includes = includes - else: - self.includes = copyfrom.includes if copyfrom is not None else [] - - if fetcher is not None: - self.fetcher = fetcher - elif copyfrom is not None: - self.fetcher = copyfrom.fetcher - else: - import requests - from cachecontrol.caches import FileCache - from cachecontrol.wrapper import CacheControl - - root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) - session = CacheControl( - requests.Session(), - cache=FileCache(root / ".cache" / "salad"), - ) - self.fetcher: Fetcher = DefaultFetcher({}, session) - - self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - - self.vocab = _vocab - self.rvocab = _rvocab - - if namespaces is not None: - self.vocab = self.vocab.copy() - self.rvocab = self.rvocab.copy() - for k, v in namespaces.items(): - self.vocab[k] = v - self.rvocab[v] = k - - @property - def graph(self) -> Graph: - """Generate a merged rdflib.Graph from all entries in 
self.schemas.""" - graph = Graph() - if not self.schemas: - return graph - key = str(hash(tuple(self.schemas))) - if key in self.cache: - return cast(Graph, self.cache[key]) - for schema in self.schemas: - fetchurl = ( - self.fetcher.urljoin(self.fileuri, schema) - if self.fileuri is not None - else pathlib.Path(schema).resolve().as_uri() - ) - if fetchurl not in self.cache or self.cache[fetchurl] is True: - _logger.debug("Getting external schema %s", fetchurl) - try: - content = self.fetcher.fetch_text(fetchurl) - except Exception as e: - _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) - continue - newGraph = Graph() - err_msg = "unknown error" - for fmt in ["xml", "turtle"]: - try: - newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) - self.cache[fetchurl] = newGraph - graph += newGraph - break - except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: - err_msg = str(e) - else: - _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) - self.cache[key] = graph - return graph - - -class Saveable(ABC): - """Mark classes than have a save() and fromDoc() function.""" - - @classmethod - @abstractmethod - def fromDoc( - cls, - _doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Saveable": - """Construct this object from the result of yaml.load().""" - - @abstractmethod - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, - ) -> CommentedMap: - """Convert this object to a JSON/YAML friendly dictionary.""" - - -def load_field(val, fieldtype, baseuri, loadingOptions): - # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any - if isinstance(val, MutableMapping): - if "$import" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - 
url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) - result, metadata = _document_load_by_url( - fieldtype, - url, - loadingOptions, - ) - loadingOptions.imports.append(url) - return result - if "$include" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) - val = loadingOptions.fetcher.fetch_text(url) - loadingOptions.includes.append(url) - return fieldtype.load(val, baseuri, loadingOptions) - - -save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] - - -def add_kv( - old_doc: CommentedMap, - new_doc: CommentedMap, - line_numbers: Dict[Any, Dict[str, int]], - key: str, - val: Any, - max_len: int, - cols: Dict[int, int], - min_col: int = 0, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, -) -> Tuple[int, Optional[Dict[int, int]]]: - """Add key value pair into Commented Map. - - Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers - for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, - and max col value taken for each line. 
- """ - if inserted_line_info is None: - inserted_line_info = {} - - if len(inserted_line_info.keys()) >= 1: - max_line = max(inserted_line_info.keys()) + 1 - else: - max_line = 0 - - if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key - line_info = old_doc.lc.data[key] # Get the line information for the key - if ( - line_info[0] + shift not in inserted_line_info - ): # If the line of the key + shift isn't taken, add it - new_doc.lc.add_kv_line_col( - key, - [ - old_doc.lc.data[key][0] + shift, - old_doc.lc.data[key][1], - old_doc.lc.data[key][2] + shift, - old_doc.lc.data[key][3], - ], - ) - inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] - else: # If the line is already taken - line = line_info[0] + shift - while line in inserted_line_info.keys(): # Find the closest free line - line += 1 - new_doc.lc.add_kv_line_col( - key, - [ - line, - old_doc.lc.data[key][1], - line + (line - old_doc.lc.data[key][2]), - old_doc.lc.data[key][3], - ], - ) - inserted_line_info[line] = old_doc.lc.data[key][1] - return max_len, inserted_line_info - elif isinstance(val, (int, float, str)) and not isinstance( - val, bool - ): # If the value is hashable - if val in line_numbers: # If the value is in the original CommentedMap - line = line_numbers[val]["line"] + shift # Get the line info for the value - if line in inserted_line_info: # Get the appropriate line to place value on - line = max_line - - col = line_numbers[val]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - inserted_line_info[line] = col + len(key) + 2 - return max_len, inserted_line_info - elif isinstance(val, str): # Logic for DSL expansition with "?" - if val + "?" 
in line_numbers: - line = line_numbers[val + "?"]["line"] + shift - if line in inserted_line_info: - line = max_line - col = line_numbers[val + "?"]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - inserted_line_info[line] = col + len(key) + 2 - return max_len, inserted_line_info - elif old_doc: - if val in old_doc: - index = old_doc.lc.data.index(val) - line_info = old_doc.lc.data[index] - if line_info[0] + shift not in inserted_line_info: - new_doc.lc.add_kv_line_col( - key, - [ - old_doc.lc.data[index][0] + shift, - old_doc.lc.data[index][1], - old_doc.lc.data[index][2] + shift, - old_doc.lc.data[index][3], - ], - ) - inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ - 1 - ] - else: - new_doc.lc.add_kv_line_col( - key, - [ - max_line + shift, - old_doc.lc.data[index][1], - max_line + (max_line - old_doc.lc.data[index][2]) + shift, - old_doc.lc.data[index][3], - ], - ) - inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] - # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) - new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) - inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1, inserted_line_info - - -@no_type_check -def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: - """Take a list of keys/indexes and iterates through the global CommentedMap.""" - doc = doc_line_info - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - return None - else: - return None - if isinstance(doc, CommentedSeq): - to_return = CommentedMap() - for index, key in enumerate(doc): - to_return[key] = "" - to_return.lc.add_kv_line_col( - key, - [ - doc.lc.data[index][0], - doc.lc.data[index][1], - doc.lc.data[index][0], - doc.lc.data[index][1], - ], - ) - 
return to_return - return doc - - -def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: - """Get line numbers for kv pairs in CommentedMap. - - For each key/value pair in a CommentedMap, save the line/col info into a dictionary, - only save value info if value is hashable. - """ - line_numbers: Dict[Any, Dict[str, int]] = {} - if doc is None: - return {} - if doc.lc.data is None: - return {} - for key, value in doc.lc.data.items(): - line_numbers[key] = {} - - line_numbers[key]["line"] = doc.lc.data[key][0] - line_numbers[key]["col"] = doc.lc.data[key][1] - if isinstance(value, (int, float, bool, str)): - line_numbers[value] = {} - line_numbers[value]["line"] = doc.lc.data[key][2] - line_numbers[value]["col"] = doc.lc.data[key][3] - return line_numbers - - -def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: - """Given a array of line column information, get the minimum column.""" - min_col = 0 - for line in line_numbers: - if line_numbers[line]["col"] > min_col: - min_col = line_numbers[line]["col"] - return min_col - - -def get_max_line_num(doc: CommentedMap) -> int: - """Get the max line number for a CommentedMap. - - Iterate through the the key with the highest line number until you reach a non-CommentedMap value - or empty CommentedMap. - """ - max_line = 0 - max_key = "" - cur = doc - while isinstance(cur, CommentedMap) and len(cur) > 0: - for key in cur.lc.data.keys(): - if cur.lc.data[key][2] >= max_line: - max_line = cur.lc.data[key][2] - max_key = key - cur = cur[max_key] - return max_line + 1 - - -def save( - val: Any, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, -) -> save_type: - """Save a val of any type. - - Recursively calls save method from class if val is of type Saveable. - Otherwise, saves val to CommentedMap or CommentedSeq. 
- """ - if keys is None: - keys = [] - - doc = iterate_through_doc(keys) - - if isinstance(val, Saveable): - return val.save( - top=top, - base_url=base_url, - relative_uris=relative_uris, - keys=keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if isinstance(val, MutableSequence): - r = CommentedSeq() - r.lc.data = {} - for i in range(0, len(val)): - new_keys = keys - if doc: - if str(i) in doc: - r.lc.data[i] = doc.lc.data[i] - new_keys.append(i) - r.append( - save( - val[i], - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=new_keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - ) - return r - - if isinstance(val, MutableMapping): - newdict = CommentedMap() - new_keys = keys - for key in val: - - if doc: - if key in doc: - newdict.lc.add_kv_line_col(key, doc.lc.data[key]) - new_keys.append(key) - - newdict[key] = save( - val[key], - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=new_keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - return newdict - if val is None or isinstance(val, (int, float, bool, str)): - return val - raise Exception("Not Saveable: %s" % type(val)) - - -def save_with_metadata( - val: Any, - valLoadingOpts: LoadingOptions, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, -) -> save_type: - """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" - saved_val = save(val, top, base_url, relative_uris) - newdict: MutableMapping[str, Any] = {} - if isinstance(saved_val, MutableSequence): - newdict = {"$graph": saved_val} - elif isinstance(saved_val, MutableMapping): - newdict = saved_val - - if valLoadingOpts.namespaces: - newdict["$namespaces"] = valLoadingOpts.namespaces - if valLoadingOpts.schemas: - newdict["$schemas"] = valLoadingOpts.schemas - if valLoadingOpts.baseuri: - newdict["$base"] = valLoadingOpts.baseuri - for k, v in valLoadingOpts.addl_metadata.items(): - if k not in 
newdict: - newdict[k] = v - - return newdict - - -def expand_url( - url, # type: str - base_url, # type: str - loadingOptions, # type: LoadingOptions - scoped_id=False, # type: bool - vocab_term=False, # type: bool - scoped_ref=None, # type: Optional[int] -): - # type: (...) -> str - if url in ("@id", "@type"): - return url - - if vocab_term and url in loadingOptions.vocab: - return url - - if bool(loadingOptions.vocab) and ":" in url: - prefix = url.split(":")[0] - if prefix in loadingOptions.vocab: - url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] - - split = urlsplit(url) - - if ( - (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) - or url.startswith("$(") - or url.startswith("${") - ): - pass - elif scoped_id and not bool(split.fragment): - splitbase = urlsplit(base_url) - frg = "" - if bool(splitbase.fragment): - frg = splitbase.fragment + "/" + split.path - else: - frg = split.path - pt = splitbase.path if splitbase.path != "" else "/" - url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) - elif scoped_ref is not None and not bool(split.fragment): - splitbase = urlsplit(base_url) - sp = splitbase.fragment.split("/") - n = scoped_ref - while n > 0 and len(sp) > 0: - sp.pop() - n -= 1 - sp.append(url) - url = urlunsplit( - ( - splitbase.scheme, - splitbase.netloc, - splitbase.path, - splitbase.query, - "/".join(sp), - ) - ) - else: - url = loadingOptions.fetcher.urljoin(base_url, url) - - if vocab_term: - split = urlsplit(url) - if bool(split.scheme): - if url in loadingOptions.rvocab: - return loadingOptions.rvocab[url] - else: - raise ValidationException(f"Term {url!r} not in vocabulary") - - return url - - -class _Loader: - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - pass - - -class _AnyLoader(_Loader): - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, 
Optional[str]) -> Any - if doc is not None: - return doc - raise ValidationException("Expected non-null") - - -class _PrimitiveLoader(_Loader): - def __init__(self, tp): - # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None - self.tp = tp - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, self.tp): - raise ValidationException( - "Expected a {} but got {}".format( - self.tp.__class__.__name__, doc.__class__.__name__ - ) - ) - return doc - - def __repr__(self): # type: () -> str - return str(self.tp) - - -class _ArrayLoader(_Loader): - def __init__(self, items): - # type: (_Loader) -> None - self.items = items - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, MutableSequence): - raise ValidationException(f"Expected a list, was {type(doc)}") - r = [] # type: List[Any] - errors = [] # type: List[SchemaSaladException] - for i in range(0, len(doc)): - try: - lf = load_field(doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions) - if isinstance(lf, MutableSequence): - r.extend(lf) - else: - r.append(lf) - except ValidationException as e: - errors.append(e.with_sourceline(SourceLine(doc, i, str))) - if errors: - raise ValidationException("", None, errors) - return r - - def __repr__(self): # type: () -> str - return f"array<{self.items}>" - - -class _EnumLoader(_Loader): - def __init__(self, symbols: Sequence[str], name: str) -> None: - self.symbols = symbols - self.name = name - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if doc in self.symbols: - return doc - raise ValidationException(f"Expected one of {self.symbols}") - - def __repr__(self): # type: () -> str - return self.name - - -class _SecondaryDSLLoader(_Loader): - def __init__(self, inner): - # type: (_Loader) -> None - 
self.inner = inner - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - r: List[Dict[str, Any]] = [] - if isinstance(doc, MutableSequence): - for d in doc: - if isinstance(d, str): - if d.endswith("?"): - r.append({"pattern": d[:-1], "required": False}) - else: - r.append({"pattern": d}) - elif isinstance(d, dict): - new_dict: Dict[str, Any] = {} - dict_copy = copy.deepcopy(d) - if "pattern" in dict_copy: - new_dict["pattern"] = dict_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {d}" - ) - new_dict["required"] = ( - dict_copy.pop("required") if "required" in dict_copy else None - ) - - if len(dict_copy): - raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - dict_copy - ) - ) - r.append(new_dict) - - else: - raise ValidationException( - "Expected a string or sequence of (strings or mappings)." - ) - elif isinstance(doc, MutableMapping): - new_dict = {} - doc_copy = copy.deepcopy(doc) - if "pattern" in doc_copy: - new_dict["pattern"] = doc_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {doc}" - ) - new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None - - if len(doc_copy): - raise ValidationException( - f"Unallowed values in secondaryFiles specification entry: {doc_copy}" - ) - r.append(new_dict) - - elif isinstance(doc, str): - if doc.endswith("?"): - r.append({"pattern": doc[:-1], "required": False}) - else: - r.append({"pattern": doc}) - else: - raise ValidationException("Expected str or sequence of str") - return self.inner.load(r, baseuri, loadingOptions, docRoot) - - -class _RecordLoader(_Loader): - def __init__(self, classtype): - # type: (Type[Saveable]) -> None - self.classtype = classtype - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, 
LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, MutableMapping): - raise ValidationException(f"Expected a dict, was {type(doc)}") - return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) - - def __repr__(self): # type: () -> str - return str(self.classtype.__name__) - - -class _ExpressionLoader(_Loader): - def __init__(self, items: Type[str]) -> None: - self.items = items - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, str): - raise ValidationException(f"Expected a str, was {type(doc)}") - return doc - - -class _UnionLoader(_Loader): - def __init__(self, alternates: Sequence[_Loader]) -> None: - self.alternates = alternates - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - errors = [] - for t in self.alternates: - try: - return t.load(doc, baseuri, loadingOptions, docRoot=docRoot) - except ValidationException as e: - errors.append(ValidationException(f"tried {t} but", None, [e])) - raise ValidationException("", None, errors, "-") - - def __repr__(self): # type: () -> str - return " | ".join(str(a) for a in self.alternates) - - -class _URILoader(_Loader): - def __init__(self, inner, scoped_id, vocab_term, scoped_ref): - # type: (_Loader, bool, bool, Union[int, None]) -> None - self.inner = inner - self.scoped_id = scoped_id - self.vocab_term = vocab_term - self.scoped_ref = scoped_ref - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableSequence): - newdoc = [] - for i in doc: - if isinstance(i, str): - newdoc.append( - expand_url( - i, - baseuri, - loadingOptions, - self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - ) - else: - newdoc.append(i) - doc = newdoc - elif isinstance(doc, str): - doc = expand_url( - doc, - baseuri, - loadingOptions, - 
self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - return self.inner.load(doc, baseuri, loadingOptions) - - -class _TypeDSLLoader(_Loader): - typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") - - def __init__(self, inner, refScope): - # type: (_Loader, Union[int, None]) -> None - self.inner = inner - self.refScope = refScope - - def resolve( - self, - doc, # type: str - baseuri, # type: str - loadingOptions, # type: LoadingOptions - ): - # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] - m = self.typeDSLregex.match(doc) - if m: - group1 = m.group(1) - assert group1 is not None # nosec - first = expand_url(group1, baseuri, loadingOptions, False, True, self.refScope) - second = third = None - if bool(m.group(2)): - second = {"type": "array", "items": first} - # second = CommentedMap((("type", "array"), - # ("items", first))) - # second.lc.add_kv_line_col("type", lc) - # second.lc.add_kv_line_col("items", lc) - # second.lc.filename = filename - if bool(m.group(3)): - third = ["null", second or first] - # third = CommentedSeq(["null", second or first]) - # third.lc.add_kv_line_col(0, lc) - # third.lc.add_kv_line_col(1, lc) - # third.lc.filename = filename - return third or second or first - return doc - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableSequence): - r = [] # type: List[Any] - for d in doc: - if isinstance(d, str): - resolved = self.resolve(d, baseuri, loadingOptions) - if isinstance(resolved, MutableSequence): - for i in resolved: - if i not in r: - r.append(i) - else: - if resolved not in r: - r.append(resolved) - else: - r.append(d) - doc = r - elif isinstance(doc, str): - doc = self.resolve(doc, baseuri, loadingOptions) - - return self.inner.load(doc, baseuri, loadingOptions) - - -class _IdMapLoader(_Loader): - def __init__(self, inner, mapSubject, mapPredicate): - # type: (_Loader, str, Union[str, None]) -> None 
- self.inner = inner - self.mapSubject = mapSubject - self.mapPredicate = mapPredicate - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableMapping): - r = [] # type: List[Any] - for k in sorted(doc.keys()): - val = doc[k] - if isinstance(val, CommentedMap): - v = copy.copy(val) - v.lc.data = val.lc.data - v.lc.filename = val.lc.filename - v[self.mapSubject] = k - r.append(v) - elif isinstance(val, MutableMapping): - v2 = copy.copy(val) - v2[self.mapSubject] = k - r.append(v2) - else: - if self.mapPredicate: - v3 = {self.mapPredicate: val} - v3[self.mapSubject] = k - r.append(v3) - else: - raise ValidationException("No mapPredicate") - doc = r - return self.inner.load(doc, baseuri, loadingOptions) - - -def _document_load( - loader: _Loader, - doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], - baseuri: str, - loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Tuple[Any, LoadingOptions]: - if isinstance(doc, str): - return _document_load_by_url( - loader, - loadingOptions.fetcher.urljoin(baseuri, doc), - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - if isinstance(doc, MutableMapping): - addl_metadata = {} - if addl_metadata_fields is not None: - for mf in addl_metadata_fields: - if mf in doc: - addl_metadata[mf] = doc[mf] - - docuri = baseuri - if "$base" in doc: - baseuri = doc["$base"] - - loadingOptions = LoadingOptions( - copyfrom=loadingOptions, - namespaces=doc.get("$namespaces", None), - schemas=doc.get("$schemas", None), - baseuri=doc.get("$base", None), - addl_metadata=addl_metadata, - ) - - doc = copy.copy(doc) - if "$namespaces" in doc: - doc.pop("$namespaces") - if "$schemas" in doc: - doc.pop("$schemas") - if "$base" in doc: - doc.pop("$base") - - if isinstance(doc, CommentedMap): - global doc_line_info - doc_line_info = doc - - if "$graph" in doc: - 
loadingOptions.idx[baseuri] = ( - loader.load(doc["$graph"], baseuri, loadingOptions), - loadingOptions, - ) - else: - loadingOptions.idx[baseuri] = ( - loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), - loadingOptions, - ) - - if docuri != baseuri: - loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] - - return loadingOptions.idx[baseuri] - if isinstance(doc, MutableSequence): - loadingOptions.idx[baseuri] = ( - loader.load(doc, baseuri, loadingOptions), - loadingOptions, - ) - return loadingOptions.idx[baseuri] - - raise ValidationException( - "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) - ) - - -def _document_load_by_url( - loader: _Loader, - url: str, - loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Tuple[Any, LoadingOptions]: - if url in loadingOptions.idx: - return loadingOptions.idx[url] - - doc_url, frg = urldefrag(url) - - text = loadingOptions.fetcher.fetch_text(doc_url) - textIO = StringIO(text) - textIO.name = str(doc_url) - yaml = yaml_no_ts() - result = yaml.load(textIO) - add_lc_filename(result, doc_url) - - loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) - - _document_load( - loader, - result, - doc_url, - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - return loadingOptions.idx[url] - - -def file_uri(path, split_frag=False): # type: (str, bool) -> str - if path.startswith("file://"): - return path - if split_frag: - pathsp = path.split("#", 2) - frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" - urlpath = pathname2url(str(pathsp[0])) - else: - urlpath = pathname2url(path) - frag = "" - if urlpath.startswith("//"): - return f"file:{urlpath}{frag}" - return f"file://{urlpath}{frag}" - - -def prefix_url(url: str, namespaces: Dict[str, str]) -> str: - """Expand short forms into full URLs using the given namespace dictionary.""" - for k, v in namespaces.items(): - if url.startswith(v): - 
return k + ":" + url[len(v) :] - return url - - -def save_relative_uri( - uri: Any, - base_url: str, - scoped_id: bool, - ref_scope: Optional[int], - relative_uris: bool, -) -> Any: - """Convert any URI to a relative one, obeying the scoping rules.""" - if isinstance(uri, MutableSequence): - return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] - elif isinstance(uri, str): - if not relative_uris or uri == base_url: - return uri - urisplit = urlsplit(uri) - basesplit = urlsplit(base_url) - if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: - if urisplit.path != basesplit.path: - p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) - if urisplit.fragment: - p = p + "#" + urisplit.fragment - return p - - basefrag = basesplit.fragment + "/" - if ref_scope: - sp = basefrag.split("/") - i = 0 - while i < ref_scope: - sp.pop() - i += 1 - basefrag = "/".join(sp) - - if urisplit.fragment.startswith(basefrag): - return urisplit.fragment[len(basefrag) :] - return urisplit.fragment - return uri - else: - return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) - - -def shortname(inputid: str) -> str: - """ - Compute the shortname of a fully qualified identifier. - - See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. - """ - parsed_id = urlparse(inputid) - if parsed_id.fragment: - return parsed_id.fragment.split("/")[-1] - return parsed_id.path.split("/")[-1] - - -def parser_info() -> str: - return "org.w3id.cwl.v1_1" - - -class Documented(Saveable): - pass - - -class RecordField(Documented): - """ - A field of a record. 
- """ - - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash((self.doc, self.name, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "RecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - 
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'RecordField'", None, _errors__) - _constructed = cls( - doc=doc, - name=name, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if 
isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", 
- val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["doc", "name", "type"]) - - -class RecordSchema(Saveable): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordSchema): - return bool(self.fields == other.fields and self.type == other.type) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "RecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'RecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - 
inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type"]) - - -class EnumSchema(Saveable): - """ - Define an enumerated type. 
- - """ - - def __init__( - self, - symbols: Any, - type: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnumSchema): - return bool(self.symbols == other.symbols and self.type == other.type) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "EnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if 
_errors__: - raise ValidationException("Trying 'EnumSchema'", None, _errors__) - _constructed = cls( - symbols=symbols, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.symbols is not None and "symbols" not in 
r: - u = save_relative_uri(self.symbols, base_url, True, None, relative_uris) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type"]) - - -class ArraySchema(Saveable): - def __init__( - self, - items: Any, - type: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ArraySchema): - return bool(self.items == other.items and self.type == other.type) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = 
doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - items = load_field( - _doc.get("items"), - typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ArraySchema'", None, _errors__) - _constructed = cls( - items=items, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - 
max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), 
- cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type"]) - - -class File(Saveable): - """ - Represents a file (or group of files when `secondaryFiles` is provided) that - will be accessible by tools using standard POSIX file system call API such as - open(2) and read(2). - - Files are represented as objects with `class` of `File`. File objects have - a number of properties that provide metadata about the file. - - The `location` property of a File is a URI that uniquely identifies the - file. Implementations must support the file:// URI scheme and may support - other schemes such as http://. The value of `location` may also be a - relative reference, in which case it must be resolved relative to the URI - of the document it appears in. Alternately to `location`, implementations - must also accept the `path` property on File, which must be a filesystem - path available on the same host as the CWL runner (for inputs) or the - runtime environment of a command line tool execution (for command line tool - outputs). - - If no `location` or `path` is specified, a file object must specify - `contents` with the UTF-8 text content of the file. This is a "file - literal". File literals do not correspond to external resources, but are - created on disk with `contents` with when needed for a executing a tool. - Where appropriate, expressions can return file literals to define new files - on a runtime. The maximum size of `contents` is 64 kilobytes. - - The `basename` property defines the filename on disk where the file is - staged. This may differ from the resource name. 
If not provided, - `basename` must be computed from the last path part of `location` and made - available to expressions. - - The `secondaryFiles` property is a list of File or Directory objects that - must be staged in the same directory as the primary file. It is an error - for file names to be duplicated in `secondaryFiles`. - - The `size` property is the size in bytes of the File. It must be computed - from the resource and made available to expressions. The `checksum` field - contains a cryptographic hash of the file content for use it verifying file - contents. Implementations may, at user option, enable or disable - computation of the `checksum` field for performance or other reasons. - However, the ability to compute output checksums is required to pass the - CWL conformance test suite. - - When executing a CommandLineTool, the files and secondary files may be - staged to an arbitrary directory, but must use the value of `basename` for - the filename. The `path` property must be file path in the context of the - tool execution runtime (local to the compute node, or within the executing - container). All computed properties should be available to expressions. - File literals also must be staged and `path` must be set. - - When collecting CommandLineTool outputs, `glob` matching returns file paths - (with the `path` property) and the derived properties. This can all be - modified by `outputEval`. Alternately, if the file `cwl.output.json` is - present in the output, `outputBinding` is ignored. - - File objects in the output must provide either a `location` URI or a `path` - property in the context of the tool execution runtime (local to the compute - node, or within the executing container). - - When evaluating an ExpressionTool, file objects must be referenced via - `location` (the expression tool does not have access to files on disk so - `path` is meaningless) or as file literals. 
It is legal to return a file - object with an existing `location` but a different `basename`. The - `loadContents` field of ExpressionTool inputs behaves the same as on - CommandLineTool inputs, however it is not meaningful on the outputs. - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - """ - - def __init__( - self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - dirname: Optional[Any] = None, - nameroot: Optional[Any] = None, - nameext: Optional[Any] = None, - checksum: Optional[Any] = None, - size: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - format: Optional[Any] = None, - contents: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "File" - self.location = location - self.path = path - self.basename = basename - self.dirname = dirname - self.nameroot = nameroot - self.nameext = nameext - self.checksum = checksum - self.size = size - self.secondaryFiles = secondaryFiles - self.format = format - self.contents = contents - - def __eq__(self, other: Any) -> bool: - if isinstance(other, File): - return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.dirname == other.dirname - and self.nameroot == other.nameroot - and self.nameext == other.nameext - and self.checksum == other.checksum - and self.size == other.size - and self.secondaryFiles == other.secondaryFiles - and self.format == other.format - and self.contents == other.contents - ) - return False - - def __hash__(self) -> int: - return hash( - ( - 
self.class_, - self.location, - self.path, - self.basename, - self.dirname, - self.nameroot, - self.nameext, - self.checksum, - self.size, - self.secondaryFiles, - self.format, - self.contents, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "File": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "File": - raise ValidationException("Not a File") - - if "location" in _doc: - try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'location' field is not valid because:", - SourceLine(_doc, "location", str), - [e], - ) - ) - else: - location = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'path' field is not valid because:", - SourceLine(_doc, "path", str), - [e], - ) - ) - else: - path = None - if "basename" in _doc: - try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'basename' field is not valid because:", - SourceLine(_doc, "basename", str), - [e], - ) - ) - else: - basename = None - if "dirname" in _doc: - try: - dirname = load_field( - _doc.get("dirname"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dirname' field is not valid because:", - SourceLine(_doc, "dirname", str), - [e], - ) - ) - else: - dirname = None - if "nameroot" in 
_doc: - try: - nameroot = load_field( - _doc.get("nameroot"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'nameroot' field is not valid because:", - SourceLine(_doc, "nameroot", str), - [e], - ) - ) - else: - nameroot = None - if "nameext" in _doc: - try: - nameext = load_field( - _doc.get("nameext"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'nameext' field is not valid because:", - SourceLine(_doc, "nameext", str), - [e], - ) - ) - else: - nameext = None - if "checksum" in _doc: - try: - checksum = load_field( - _doc.get("checksum"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'checksum' field is not valid because:", - SourceLine(_doc, "checksum", str), - [e], - ) - ) - else: - checksum = None - if "size" in _doc: - try: - size = load_field( - _doc.get("size"), - union_of_None_type_or_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'size' field is not valid because:", - SourceLine(_doc, "size", str), - [e], - ) - ) - else: - size = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - 
_errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "contents" in _doc: - try: - contents = load_field( - _doc.get("contents"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'contents' field is not valid because:", - SourceLine(_doc, "contents", str), - [e], - ) - ) - else: - contents = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'File'", None, _errors__) - _constructed = cls( - location=location, - path=path, - basename=basename, - dirname=dirname, - nameroot=nameroot, - nameext=nameext, - checksum=checksum, - size=size, - secondaryFiles=secondaryFiles, - format=format, - contents=contents, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = 
get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "File" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.location is not None and "location" not in r: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="location", - val=r.get("location"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.path is not None and "path" not in r: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="path", - val=r.get("path"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.basename is not 
None and "basename" not in r: - r["basename"] = save( - self.basename, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="basename", - val=r.get("basename"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dirname is not None and "dirname" not in r: - r["dirname"] = save( - self.dirname, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dirname", - val=r.get("dirname"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.nameroot is not None and "nameroot" not in r: - r["nameroot"] = save( - self.nameroot, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="nameroot", - val=r.get("nameroot"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.nameext is not None and "nameext" not in r: - r["nameext"] = save( - self.nameext, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="nameext", - val=r.get("nameext"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.checksum is not None and "checksum" not in r: - r["checksum"] = save( - self.checksum, - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="checksum", - val=r.get("checksum"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.size is not None and "size" not in r: - r["size"] = save( - self.size, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="size", - val=r.get("size"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, base_url, True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.contents is not None and "contents" not in r: - r["contents"] = save( - self.contents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - 
line_numbers=line_numbers, - key="contents", - val=r.get("contents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "location", - "path", - "basename", - "dirname", - "nameroot", - "nameext", - "checksum", - "size", - "secondaryFiles", - "format", - "contents", - ] - ) - - -class Directory(Saveable): - """ - Represents a directory to present to a command line tool. - - Directories are represented as objects with `class` of `Directory`. Directory objects have - a number of properties that provide metadata about the directory. - - The `location` property of a Directory is a URI that uniquely identifies - the directory. Implementations must support the file:// URI scheme and may - support other schemes such as http://. Alternately to `location`, - implementations must also accept the `path` property on Directory, which - must be a filesystem path available on the same host as the CWL runner (for - inputs) or the runtime environment of a command line tool execution (for - command line tool outputs). - - A Directory object may have a `listing` field. This is a list of File and - Directory objects that are contained in the Directory. For each entry in - `listing`, the `basename` property defines the name of the File or - Subdirectory when staged to disk. If `listing` is not provided, the - implementation must have some way of fetching the Directory listing at - runtime based on the `location` field. - - If a Directory does not have `location`, it is a Directory literal. A - Directory literal must provide `listing`. Directory literals must be - created on disk at runtime as needed. 
- - The resources in a Directory literal do not need to have any implied - relationship in their `location`. For example, a Directory listing may - contain two files located on different hosts. It is the responsibility of - the runtime to ensure that those files are staged to disk appropriately. - Secondary files associated with files in `listing` must also be staged to - the same Directory. - - When executing a CommandLineTool, Directories must be recursively staged - first and have local values of `path` assigend. - - Directory objects in CommandLineTool output must provide either a - `location` URI or a `path` property in the context of the tool execution - runtime (local to the compute node, or within the executing container). - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - Name conflicts (the same `basename` appearing multiple times in `listing` - or in any entry in `secondaryFiles` in the listing) is a fatal error. 
- - """ - - def __init__( - self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - listing: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "Directory" - self.location = location - self.path = path - self.basename = basename - self.listing = listing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Directory): - return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.listing == other.listing - ) - return False - - def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Directory": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "Directory": - raise ValidationException("Not a Directory") - - if "location" in _doc: - try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'location' field is not valid because:", - SourceLine(_doc, "location", str), - [e], - ) - ) - else: - location = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'path' field is not valid because:", - SourceLine(_doc, "path", str), - [e], - ) - ) - else: - path = None - if "basename" in _doc: - try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'basename' field is not valid because:", - SourceLine(_doc, "basename", str), - [e], - ) - ) - else: - basename = None - if "listing" in _doc: - try: - listing = load_field( - _doc.get("listing"), - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'listing' field is not valid because:", - SourceLine(_doc, "listing", str), - [e], - ) - ) - else: - listing = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Directory'", None, _errors__) - _constructed = cls( - location=location, - path=path, - basename=basename, - listing=listing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - 
r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "Directory" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.location is not None and "location" not in r: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="location", - val=r.get("location"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.path is not None and "path" not in r: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="path", - val=r.get("path"), - 
cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.basename is not None and "basename" not in r: - r["basename"] = save( - self.basename, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="basename", - val=r.get("basename"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.listing is not None and "listing" not in r: - r["listing"] = save( - self.listing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="listing", - val=r.get("listing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "location", "path", "basename", "listing"]) - - -class Labeled(Saveable): - pass - - -class Identified(Saveable): - pass - - -class LoadContents(Saveable): - pass - - -class FieldBase(Labeled): - pass - - -class InputFormat(Saveable): - pass - - -class OutputFormat(Saveable): - pass - - -class Parameter(FieldBase, Documented, Identified): - """ - Define an input or output parameter to a process. 
- - """ - - pass - - -class InputBinding(Saveable): - def __init__( - self, - loadContents: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputBinding): - return bool(self.loadContents == other.loadContents) - return False - - def __hash__(self) -> int: - return hash((self.loadContents)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputBinding": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `loadContents`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputBinding'", None, _errors__) - _constructed = cls( - loadContents=loadContents, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - 
def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - 
old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["loadContents"]) - - -class IOSchema(Labeled, Documented): - pass - - -class InputSchema(IOSchema): - pass - - -class OutputSchema(IOSchema): - pass - - -class InputRecordField(RecordField, FieldBase, InputFormat, LoadContents): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == 
other.loadListing - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - self.loadContents, - self.loadListing, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - 
union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - 
baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputRecordField'", None, _errors__) - _constructed = cls( - doc=doc, - name=name, - type=type, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - loadContents=loadContents, - loadListing=loadListing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if 
doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, 
- line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "loadContents", - "loadListing", - ] - ) - - -class InputRecordSchema(RecordSchema, InputSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = 
LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, 
"type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputRecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) 
- min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "doc", "name"]) - - -class InputEnumSchema(EnumSchema, InputSchema): - def __init__( - self, - symbols: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: 
Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputEnumSchema): - return bool( - self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - 
typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputEnumSchema'", None, _errors__) - _constructed = cls( - symbols=symbols, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - 
doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - 
old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type", "label", "doc", "name"]) - - -class InputArraySchema(ArraySchema, InputSchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = 
None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - items = load_field( - _doc.get("items"), - 
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputArraySchema'", None, _errors__) - _constructed = cls( - items=items, - type=type, - 
label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, 
base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - 
val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class OutputRecordField(RecordField, FieldBase, OutputFormat): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - 
_doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, 
- ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputRecordField'", None, _errors__) - _constructed = cls( - doc=doc, - name=name, - type=type, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - 
shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, 
- base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] - ) - - -class OutputRecordSchema(RecordSchema, OutputSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def 
__hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = 
load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputRecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, 
key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "doc", "name"]) - - -class OutputEnumSchema(EnumSchema, OutputSchema): - def __init__( - self, - symbols: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if 
isinstance(other, OutputEnumSchema): - return bool( - self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputEnumSchema'", None, _errors__) - _constructed = cls( - symbols=symbols, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in 
self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - 
old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type", "label", "doc", "name"]) - - -class OutputArraySchema(ArraySchema, OutputSchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - 
self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - items = load_field( - _doc.get("items"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputArraySchema'", None, _errors__) - _constructed = cls( - items=items, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, 
doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - 
cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class InputParameter(Parameter, InputFormat, LoadContents): - pass - - -class OutputParameter(Parameter, OutputFormat): - pass - - -class ProcessRequirement(Saveable): - """ - A process requirement declares a prerequisite 
that may or must be fulfilled - before executing a process. See [`Process.hints`](#process) and - [`Process.requirements`](#process). - - Process requirements are the primary mechanism for specifying extensions to - the CWL core specification. - - """ - - pass - - -class Process(Identified, Labeled, Documented): - """ - - The base executable type in CWL is the `Process` object defined by the - document. Note that the `Process` object is abstract and cannot be - directly executed. - - """ - - pass - - -class InlineJavascriptRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support inline Javascript expressions. - If this requirement is not present, the workflow platform must not perform expression - interpolatation. - - """ - - def __init__( - self, - expressionLib: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InlineJavascriptRequirement" - self.expressionLib = expressionLib - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InlineJavascriptRequirement): - return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.expressionLib)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InlineJavascriptRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "InlineJavascriptRequirement": - raise ValidationException("Not a InlineJavascriptRequirement") - - if "expressionLib" in 
_doc: - try: - expressionLib = load_field( - _doc.get("expressionLib"), - union_of_None_type_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'expressionLib' field is not valid because:", - SourceLine(_doc, "expressionLib", str), - [e], - ) - ) - else: - expressionLib = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `expressionLib`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'InlineJavascriptRequirement'", None, _errors__ - ) - _constructed = cls( - expressionLib=expressionLib, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "InlineJavascriptRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if 
getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.expressionLib is not None and "expressionLib" not in r: - r["expressionLib"] = save( - self.expressionLib, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="expressionLib", - val=r.get("expressionLib"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "expressionLib"]) - - -class CommandInputSchema(Saveable): - pass - - -class SchemaDefRequirement(ProcessRequirement): - """ - This field consists of an array of type definitions which must be used when - interpreting the `inputs` and `outputs` fields. When a `type` field - contain a IRI, the implementation must check if the type is defined in - `schemaDefs` and use that definition. 
If the type is not found in - `schemaDefs`, it is an error. The entries in `schemaDefs` must be - processed in the order listed such that later schema definitions may refer - to earlier schema definitions. - - """ - - def __init__( - self, - types: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SchemaDefRequirement" - self.types = types - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SchemaDefRequirement): - return bool(self.class_ == other.class_ and self.types == other.types) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.types)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SchemaDefRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SchemaDefRequirement": - raise ValidationException("Not a SchemaDefRequirement") - - try: - types = load_field( - _doc.get("types"), - array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'types' field is not valid because:", - SourceLine(_doc, "types", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: 
`class`, `types`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SchemaDefRequirement'", None, _errors__) - _constructed = cls( - types=types, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SchemaDefRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.types is not None and "types" not in r: - r["types"] = save( - self.types, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="types", - val=r.get("types"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "types"]) - - -class SecondaryFileSchema(Saveable): - def __init__( - self, - pattern: Any, - required: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.pattern = pattern - self.required = required - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SecondaryFileSchema): - return bool( - self.pattern == other.pattern and self.required == other.required - ) - return False - - def __hash__(self) -> int: - return hash((self.pattern, self.required)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SecondaryFileSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - pattern = load_field( - _doc.get("pattern"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - 
) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'pattern' field is not valid because:", - SourceLine(_doc, "pattern", str), - [e], - ) - ) - if "required" in _doc: - try: - required = load_field( - _doc.get("required"), - union_of_None_type_or_booltype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'required' field is not valid because:", - SourceLine(_doc, "required", str), - [e], - ) - ) - else: - required = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `pattern`, `required`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SecondaryFileSchema'", None, _errors__) - _constructed = cls( - pattern=pattern, - required=required, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = 
self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.pattern is not None and "pattern" not in r: - r["pattern"] = save( - self.pattern, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="pattern", - val=r.get("pattern"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.required is not None and "required" not in r: - r["required"] = save( - self.required, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="required", - val=r.get("required"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = 
self.loadingOptions.namespaces
        if self.loadingOptions.schemas:
            r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["pattern", "required"])


class LoadListingRequirement(ProcessRequirement):
    """
    Specify the desired behavior for loading the `listing` field of
    a Directory object for use by expressions.

    """

    def __init__(
        self,
        loadListing: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.class_ = "LoadListingRequirement"
        self.loadListing = loadListing

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, LoadListingRequirement):
            return bool(
                self.class_ == other.class_ and self.loadListing == other.loadListing
            )
        return False

    def __hash__(self) -> int:
        return hash((self.class_, self.loadListing))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "LoadListingRequirement":
        # Shallow-copy the doc but keep its ruamel line/column (lc) metadata so
        # validation errors and round-tripped output can point at source lines.
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        # Field errors are accumulated (not raised immediately) so the user
        # sees every problem in one ValidationException.
        _errors__ = []

        if _doc.get("class") != "LoadListingRequirement":
            raise ValidationException("Not a LoadListingRequirement")

        if "loadListing" in _doc:
            try:
                loadListing = load_field(
                    _doc.get("loadListing"),
                    union_of_None_type_or_LoadListingEnumLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'loadListing' field is not valid because:",
                        SourceLine(_doc, "loadListing", str),
                        [e],
                    )
                )
        else:
            loadListing = None
        # Unknown keys containing ":" are treated as namespaced extension
        # fields; anything else unknown is a validation error.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `class`, `loadListing`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException(
                "Trying 'LoadListingRequirement'", None, _errors__
            )
        _constructed = cls(
            loadListing=loadListing,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        # Carry the source document's line/column onto the output map so the
        # saved YAML round-trips with the original layout.
        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        r["class"] = "LoadListingRequirement"

        # First pass: emit fields in the order they appeared in the source doc.
        if doc:
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                # Skip past lines already claimed by inserted content.
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Second pass: emit any field set on the object but absent from the doc.
        if self.loadListing is not None and "loadListing" not in r:
            r["loadListing"] = save(
                self.loadListing,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="loadListing",
                val=r.get("loadListing"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class", "loadListing"])


class EnvironmentDef(Saveable):
    """
    Define an environment variable that will be set in the runtime environment
    by the workflow platform when executing the command line tool. May be the
    result of executing an expression, such as getting a parameter from input.
- - """ - - def __init__( - self, - envName: Any, - envValue: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.envName = envName - self.envValue = envValue - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvironmentDef): - return bool( - self.envName == other.envName and self.envValue == other.envValue - ) - return False - - def __hash__(self) -> int: - return hash((self.envName, self.envValue)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "EnvironmentDef": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - envName = load_field( - _doc.get("envName"), - strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'envName' field is not valid because:", - SourceLine(_doc, "envName", str), - [e], - ) - ) - try: - envValue = load_field( - _doc.get("envValue"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'envValue' field is not valid because:", - SourceLine(_doc, "envValue", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if 
_errors__: - raise ValidationException("Trying 'EnvironmentDef'", None, _errors__) - _constructed = cls( - envName=envName, - envValue=envValue, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.envName is not None and 
"envName" not in r: - r["envName"] = save( - self.envName, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="envName", - val=r.get("envName"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.envValue is not None and "envValue" not in r: - r["envValue"] = save( - self.envValue, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="envValue", - val=r.get("envValue"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["envName", "envValue"]) - - -class CommandLineBinding(InputBinding): - """ - - When listed under `inputBinding` in the input schema, the term - "value" refers to the the corresponding value in the input object. For - binding objects listed in `CommandLineTool.arguments`, the term "value" - refers to the effective value after evaluating `valueFrom`. - - The binding behavior when building the command line depends on the data - type of the value. If there is a mismatch between the type described by - the input schema and the effective value, such as resulting from an - expression evaluation, an implementation must use the data type of the - effective value. - - - **string**: Add `prefix` and the string to the command line. - - - **number**: Add `prefix` and decimal representation to command line. 

      - **boolean**: If true, add `prefix` to the command line. If false, add
          nothing.

      - **File**: Add `prefix` and the value of
        [`File.path`](#File) to the command line.

      - **Directory**: Add `prefix` and the value of
        [`Directory.path`](#Directory) to the command line.

      - **array**: If `itemSeparator` is specified, add `prefix` and the join
          the array into a single string with `itemSeparator` separating the
          items. Otherwise first add `prefix`, then recursively process
          individual elements.
          If the array is empty, it does not add anything to command line.

      - **object**: Add `prefix` only, and recursively add object fields for
        which `inputBinding` is specified.

      - **null**: Add nothing.

    """

    def __init__(
        self,
        loadContents: Optional[Any] = None,
        position: Optional[Any] = None,
        prefix: Optional[Any] = None,
        separate: Optional[Any] = None,
        itemSeparator: Optional[Any] = None,
        valueFrom: Optional[Any] = None,
        shellQuote: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.loadContents = loadContents
        self.position = position
        self.prefix = prefix
        self.separate = separate
        self.itemSeparator = itemSeparator
        self.valueFrom = valueFrom
        self.shellQuote = shellQuote

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, CommandLineBinding):
            return bool(
                self.loadContents == other.loadContents
                and self.position == other.position
                and self.prefix == other.prefix
                and self.separate == other.separate
                and self.itemSeparator == other.itemSeparator
                and self.valueFrom == other.valueFrom
                and self.shellQuote == other.shellQuote
            )
        return False

    def __hash__(self) -> int:
        return hash(
            (
                self.loadContents,
                self.position,
                self.prefix,
                self.separate,
                self.itemSeparator,
                self.valueFrom,
                self.shellQuote,
            )
        )

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "CommandLineBinding":
        # Shallow-copy the doc, preserving ruamel lc metadata for error lines.
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        # All field errors are accumulated and raised together at the end.
        _errors__ = []
        if "loadContents" in _doc:
            try:
                loadContents = load_field(
                    _doc.get("loadContents"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'loadContents' field is not valid because:",
                        SourceLine(_doc, "loadContents", str),
                        [e],
                    )
                )
        else:
            loadContents = None
        if "position" in _doc:
            try:
                position = load_field(
                    _doc.get("position"),
                    union_of_None_type_or_inttype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'position' field is not valid because:",
                        SourceLine(_doc, "position", str),
                        [e],
                    )
                )
        else:
            position = None
        if "prefix" in _doc:
            try:
                prefix = load_field(
                    _doc.get("prefix"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'prefix' field is not valid because:",
                        SourceLine(_doc, "prefix", str),
                        [e],
                    )
                )
        else:
            prefix = None
        if "separate" in _doc:
            try:
                separate = load_field(
                    _doc.get("separate"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'separate' field is not valid because:",
                        SourceLine(_doc, "separate", str),
                        [e],
                    )
                )
        else:
            separate = None
        if "itemSeparator" in _doc:
            try:
                itemSeparator = load_field(
                    _doc.get("itemSeparator"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'itemSeparator' field is not valid because:",
                        SourceLine(_doc, "itemSeparator", str),
                        [e],
                    )
                )
        else:
            itemSeparator = None
        if "valueFrom" in _doc:
            try:
                valueFrom = load_field(
                    _doc.get("valueFrom"),
                    union_of_None_type_or_strtype_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'valueFrom' field is not valid because:",
                        SourceLine(_doc, "valueFrom", str),
                        [e],
                    )
                )
        else:
            valueFrom = None
        if "shellQuote" in _doc:
            try:
                shellQuote = load_field(
                    _doc.get("shellQuote"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'shellQuote' field is not valid because:",
                        SourceLine(_doc, "shellQuote", str),
                        [e],
                    )
                )
        else:
            shellQuote = None
        # Unknown ":"-containing keys become namespaced extension fields.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'CommandLineBinding'", None, _errors__)
        _constructed = cls(
            loadContents=loadContents,
            position=position,
            prefix=prefix,
            separate=separate,
            itemSeparator=itemSeparator,
            valueFrom=valueFrom,
            shellQuote=shellQuote,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        # Keep source line/column so saved YAML round-trips original layout.
        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        # First pass: fields in original document order.
        if doc:
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Second pass: set fields not present in the source doc.
        if self.loadContents is not None and "loadContents" not in r:
            r["loadContents"] = save(
                self.loadContents,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="loadContents",
                val=r.get("loadContents"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.position is not None and "position" not in r:
            r["position"] = save(
                self.position,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="position",
                val=r.get("position"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.prefix is not None and "prefix" not in r:
            r["prefix"] = save(
                self.prefix,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="prefix",
                val=r.get("prefix"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.separate is not None and "separate" not in r:
            r["separate"] = save(
                self.separate,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="separate",
                val=r.get("separate"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.itemSeparator is not None and "itemSeparator" not in r:
            r["itemSeparator"] = save(
                self.itemSeparator,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="itemSeparator",
                val=r.get("itemSeparator"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.valueFrom is not None and "valueFrom" not in r:
            r["valueFrom"] = save(
                self.valueFrom,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="valueFrom",
                val=r.get("valueFrom"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.shellQuote is not None and "shellQuote" not in r:
            r["shellQuote"] = save(
                self.shellQuote,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="shellQuote",
                val=r.get("shellQuote"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(
        [
            "loadContents",
            "position",
            "prefix",
            "separate",
            "itemSeparator",
            "valueFrom",
            "shellQuote",
        ]
    )


class CommandOutputBinding(LoadContents):
    """
    Describes how to generate an output parameter based on the files produced
    by a CommandLineTool.

    The output parameter value is generated by applying these operations in the
    following order:

      - glob
      - loadContents
      - outputEval
      - secondaryFiles

    """

    def __init__(
        self,
        loadContents: Optional[Any] = None,
        loadListing: Optional[Any] = None,
        glob: Optional[Any] = None,
        outputEval: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:

        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.loadContents = loadContents
        self.loadListing = loadListing
        self.glob = glob
        self.outputEval = outputEval

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, CommandOutputBinding):
            return bool(
                self.loadContents == other.loadContents
                and self.loadListing == other.loadListing
                and self.glob == other.glob
                and self.outputEval == other.outputEval
            )
        return False

    def __hash__(self) -> int:
        return hash((self.loadContents, self.loadListing, self.glob, self.outputEval))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "CommandOutputBinding":
        # Shallow-copy the doc, preserving ruamel lc metadata for error lines.
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        # Field errors are accumulated and raised together at the end.
        _errors__ = []
        if "loadContents" in _doc:
            try:
                loadContents = load_field(
                    _doc.get("loadContents"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'loadContents' field is not valid because:",
                        SourceLine(_doc, "loadContents", str),
                        [e],
                    )
                )
        else:
            loadContents = None
        if "loadListing" in _doc:
            try:
                loadListing = load_field(
                    _doc.get("loadListing"),
                    union_of_None_type_or_LoadListingEnumLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'loadListing' field is not valid because:",
                        SourceLine(_doc, "loadListing", str),
                        [e],
                    )
                )
        else:
            loadListing = None
        if "glob" in _doc:
            try:
                glob = load_field(
                    _doc.get("glob"),
                    union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'glob' field is not valid because:",
                        SourceLine(_doc, "glob", str),
                        [e],
                    )
                )
        else:
            glob = None
        if "outputEval" in _doc:
            try:
                outputEval = load_field(
                    _doc.get("outputEval"),
                    union_of_None_type_or_ExpressionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the 'outputEval' field is not valid because:",
                        SourceLine(_doc, "outputEval", str),
                        [e],
                    )
                )
        else:
            outputEval = None
        # Unknown ":"-containing keys become namespaced extension fields.
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'CommandOutputBinding'", None, _errors__)
        _constructed = cls(
            loadContents=loadContents,
            loadListing=loadListing,
            glob=glob,
            outputEval=outputEval,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self,
        top: bool = False,
        base_url: str = "",
        relative_uris: bool = True,
        keys: Optional[List[Any]] = None,
        inserted_line_info: Optional[Dict[int, int]] = None,
        shift: int = 0
    ) -> CommentedMap:
        if keys is None:
            keys = []
        r = CommentedMap()
        keys = copy.copy(keys)

        doc = iterate_through_doc(keys)

        if inserted_line_info is None:
            inserted_line_info = {}

        # Keep source line/column so saved YAML round-trips original layout.
        if doc is not None:
            r._yaml_set_line_col(doc.lc.line, doc.lc.col)
        line_numbers = get_line_numbers(doc)
        max_len = get_max_line_num(doc)
        min_col = get_min_col(line_numbers)
        cols: Dict[int, int] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]

        # First pass: fields in original document order.
        if doc:
            for key in doc.lc.data.keys():
                if isinstance(key, str):
                    if hasattr(self, key):
                        if getattr(self, key) is not None:
                            if key != 'class':
                                line = doc.lc.data[key][0] + shift
                                if inserted_line_info:
                                    while line in inserted_line_info:
                                        line += 1
                                        shift += 1
                                saved_val = save(
                                    getattr(self, key),
                                    top=False,
                                    base_url=base_url,
                                    relative_uris=relative_uris,
                                    keys=keys + [key],
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )

                                # If the returned value is a list of size 1, just save the value in the list
                                if type(saved_val) == list:
                                    if (
                                        len(saved_val) == 1
                                    ):
                                        saved_val = saved_val[0]

                                r[key] = saved_val

                                max_len, inserted_line_info = add_kv(
                                    old_doc=doc,
                                    new_doc=r,
                                    line_numbers=line_numbers,
                                    key=key,
                                    val=r.get(key),
                                    cols=cols,
                                    min_col=min_col,
                                    max_len=max_len,
                                    inserted_line_info=inserted_line_info,
                                    shift=shift
                                )
        # Second pass: set fields not present in the source doc.
        if self.loadContents is not None and "loadContents" not in r:
            r["loadContents"] = save(
                self.loadContents,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
            max_len, inserted_line_info = add_kv(
                old_doc=doc,
                new_doc=r,
                line_numbers=line_numbers,
                key="loadContents",
                val=r.get("loadContents"),
                cols=cols,
                min_col=min_col,
                max_len=max_len,
                inserted_line_info=inserted_line_info,
                shift=shift,
            )
        if self.loadListing is not None and "loadListing" not in r:
r["loadListing"] = save( - self.loadListing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.glob is not None and "glob" not in r: - r["glob"] = save( - self.glob, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="glob", - val=r.get("glob"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputEval is not None and "outputEval" not in r: - r["outputEval"] = save( - self.outputEval, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputEval", - val=r.get("outputEval"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"]) - - -class CommandLineBindable(Saveable): - def __init__( - self, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = 
CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineBindable): - return bool(self.inputBinding == other.inputBinding) - return False - - def __hash__(self) -> int: - return hash((self.inputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandLineBindable": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandLineBindable'", None, _errors__) - _constructed = cls( - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) 
- - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if 
self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["inputBinding"]) - - -class CommandInputRecordField(InputRecordField, CommandLineBindable): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - self.loadContents, - self.loadListing, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( 
- cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - 
else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = 
None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordField'", None, _errors__ - ) - _constructed = cls( - doc=doc, - name=name, - type=type, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - loadContents=loadContents, - loadListing=loadListing, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if 
relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - 
self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) - 
r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - 
attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "loadContents", - "loadListing", - "inputBinding", - ] - ) - - -class CommandInputRecordSchema( - InputRecordSchema, CommandInputSchema, CommandLineBindable -): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.doc = doc - self.name = name - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, 
"name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - 
inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordSchema'", None, _errors__ - ) - _constructed = cls( - fields=fields, - type=type, - label=label, - doc=doc, - name=name, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - 
base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) - - -class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): - def __init__( - self, - symbols: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if 
extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - self.label = label - self.doc = doc - self.name = name - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputEnumSchema): - return bool( - self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.symbols, - self.type, - self.label, - self.doc, - self.name, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = 
load_field( - _doc.get("type"), - typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `doc`, `name`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputEnumSchema'", None, _errors__ - ) - _constructed = cls( - symbols=symbols, - type=type, - label=label, - doc=doc, - name=name, - 
inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, 
None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type", "label", "doc", "name", "inputBinding"]) - - -class CommandInputArraySchema( - InputArraySchema, CommandInputSchema, CommandLineBindable -): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.doc = doc - self.name = name - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - 
(self.items, self.type, self.label, self.doc, self.name, self.inputBinding) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - items = load_field( - _doc.get("items"), - typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputArraySchema'", None, _errors__ - ) - _constructed = cls( - items=items, - type=type, - label=label, - doc=doc, - name=name, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = 
iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - 
) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - 
val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) - - -class CommandOutputRecordField(OutputRecordField): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - self.outputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - 
loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if 
"secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, 
`outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordField'", None, _errors__ - ) - _constructed = cls( - doc=doc, - name=name, - type=type, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val 
- - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if 
self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: 
- r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "outputBinding", - ] - ) - - -class CommandOutputRecordSchema(OutputRecordSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - 
__original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if 
_errors__: - raise ValidationException( - "Trying 'CommandOutputRecordSchema'", None, _errors__ - ) - _constructed = cls( - fields=fields, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "doc", "name"]) - - -class CommandOutputEnumSchema(OutputEnumSchema): - def __init__( - self, - symbols: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.symbols = symbols - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputEnumSchema): - return bool( - self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.symbols, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - 
uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - 
_errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `symbols`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputEnumSchema'", None, _errors__ - ) - _constructed = cls( - symbols=symbols, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - 
r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" 
not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["symbols", "type", "label", "doc", "name"]) - - -class CommandOutputArraySchema(OutputArraySchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = 
doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - items = load_field( - _doc.get("items"), - typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputArraySchema'", None, _errors__ - ) - _constructed = cls( - items=items, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - 
saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.items is not None and "items" not in r: - r["items"] = save( - self.items, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - 
top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class CommandInputParameter(InputParameter): - """ - An input parameter for a CommandLineTool. 
- """ - - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default - self.type = type - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default - and self.type == other.type - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.loadContents, - self.loadListing, - self.default, - self.type, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputParameter": - _doc = copy.copy(doc) - if 
hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - try: - type = load_field( - _doc.get("type"), - 
typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandInputParameter'", None, _errors__) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, - type=type, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - 
keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = 
save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, 
inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "loadContents", - "loadListing", - "default", - "type", - "inputBinding", - ] - ) - - -class CommandOutputParameter(OutputParameter): - """ - An output parameter for a CommandLineTool. 
- """ - - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.type = type - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type == other.type - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.type, - self.outputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - 
SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - type=type, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - 
return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - 
base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - 
class CommandLineTool(Process):
    """
    This defines the schema of the CWL Command Line Tool Description document.

    """

    # Fields compared by __eq__ and folded into __hash__, in declaration
    # order.  Keeping one shared tuple prevents the two methods drifting.
    _compare_fields = (
        "id",
        "label",
        "doc",
        "inputs",
        "outputs",
        "requirements",
        "hints",
        "cwlVersion",
        "class_",
        "baseCommand",
        "arguments",
        "stdin",
        "stderr",
        "stdout",
        "successCodes",
        "temporaryFailCodes",
        "permanentFailCodes",
    )

    def __init__(
        self,
        inputs: Any,
        outputs: Any,
        id: Optional[Any] = None,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        requirements: Optional[Any] = None,
        hints: Optional[Any] = None,
        cwlVersion: Optional[Any] = None,
        baseCommand: Optional[Any] = None,
        arguments: Optional[Any] = None,
        stdin: Optional[Any] = None,
        stderr: Optional[Any] = None,
        stdout: Optional[Any] = None,
        successCodes: Optional[Any] = None,
        temporaryFailCodes: Optional[Any] = None,
        permanentFailCodes: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        self.extension_fields = (
            extension_fields if extension_fields else CommentedMap()
        )
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.id = id
        self.label = label
        self.doc = doc
        self.inputs = inputs
        self.outputs = outputs
        self.requirements = requirements
        self.hints = hints
        self.cwlVersion = cwlVersion
        self.class_ = "CommandLineTool"
        self.baseCommand = baseCommand
        self.arguments = arguments
        self.stdin = stdin
        self.stderr = stderr
        self.stdout = stdout
        self.successCodes = successCodes
        self.temporaryFailCodes = temporaryFailCodes
        self.permanentFailCodes = permanentFailCodes

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, CommandLineTool):
            return all(
                getattr(self, f) == getattr(other, f) for f in self._compare_fields
            )
        return False

    def __hash__(self) -> int:
        return hash(tuple(getattr(self, f) for f in self._compare_fields))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "CommandLineTool":
        """Construct a CommandLineTool from a parsed YAML/JSON document node.

        Per-field validation problems are collected and raised together as
        one ValidationException ("Trying 'CommandLineTool'") rather than
        failing on the first bad field.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Carry the ruamel line/column info over to the shallow copy.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []

        if _doc.get("class") != "CommandLineTool":
            raise ValidationException("Not a CommandLineTool")

        def _load(field: str, loader: Any) -> Any:
            # Load one field, recording (not raising) validation errors.
            # Returning None on failure is safe: a non-empty _errors__
            # aborts construction before any loaded value is used.  This
            # also fixes the original's unbound-local `id` when the id
            # field itself fails to load.  Note: reads `baseuri` at call
            # time, so loads after the id field see the rebased URI.
            try:
                return load_field(_doc.get(field), loader, baseuri, loadingOptions)
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the '{}' field is not valid because:".format(field),
                        SourceLine(_doc, field, str),
                        [e],
                    )
                )
                return None

        id = (
            _load("id", uri_union_of_None_type_or_strtype_True_False_None)
            if "id" in _doc
            else None
        )

        __original_id_is_none = id is None
        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                # Anonymous node: mint a blank-node identifier.
                id = "_:" + str(_uuid__.uuid4())
        if not __original_id_is_none:
            baseuri = id

        label = _load("label", union_of_None_type_or_strtype) if "label" in _doc else None
        doc = (
            _load("doc", union_of_None_type_or_strtype_or_array_of_strtype)
            if "doc" in _doc
            else None
        )
        # inputs and outputs are required: always attempt the load.
        inputs = _load("inputs", idmap_inputs_array_of_CommandInputParameterLoader)
        outputs = _load("outputs", idmap_outputs_array_of_CommandOutputParameterLoader)
        requirements = (
            _load(
                "requirements",
                idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader,
            )
            if "requirements" in _doc
            else None
        )
        hints = (
            _load(
                "hints",
                idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type,
            )
            if "hints" in _doc
            else None
        )
        cwlVersion = (
            _load("cwlVersion", uri_union_of_None_type_or_CWLVersionLoader_False_True_None)
            if "cwlVersion" in _doc
            else None
        )
        baseCommand = (
            _load("baseCommand", union_of_None_type_or_strtype_or_array_of_strtype)
            if "baseCommand" in _doc
            else None
        )
        arguments = (
            _load(
                "arguments",
                union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader,
            )
            if "arguments" in _doc
            else None
        )
        stdin = (
            _load("stdin", union_of_None_type_or_strtype_or_ExpressionLoader)
            if "stdin" in _doc
            else None
        )
        stderr = (
            _load("stderr", union_of_None_type_or_strtype_or_ExpressionLoader)
            if "stderr" in _doc
            else None
        )
        stdout = (
            _load("stdout", union_of_None_type_or_strtype_or_ExpressionLoader)
            if "stdout" in _doc
            else None
        )
        successCodes = (
            _load("successCodes", union_of_None_type_or_array_of_inttype)
            if "successCodes" in _doc
            else None
        )
        temporaryFailCodes = (
            _load("temporaryFailCodes", union_of_None_type_or_array_of_inttype)
            if "temporaryFailCodes" in _doc
            else None
        )
        permanentFailCodes = (
            _load("permanentFailCodes", union_of_None_type_or_array_of_inttype)
            if "permanentFailCodes" in _doc
            else None
        )

        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    # Namespaced keys become extension fields.
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'CommandLineTool'", None, _errors__)
        _constructed = cls(
            id=id,
            label=label,
            doc=doc,
            inputs=inputs,
            outputs=outputs,
            requirements=requirements,
            hints=hints,
            cwlVersion=cwlVersion,
            baseCommand=baseCommand,
            arguments=arguments,
            stdin=stdin,
            stderr=stderr,
            stdout=stdout,
            successCodes=successCodes,
            temporaryFailCodes=temporaryFailCodes,
            permanentFailCodes=permanentFailCodes,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        loadingOptions.idx[id] = (_constructed, loadingOptions)
        return _constructed
len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "CommandLineTool" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - 
if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.baseCommand is not None and "baseCommand" not in r: - r["baseCommand"] = save( - self.baseCommand, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="baseCommand", - val=r.get("baseCommand"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.arguments is not None and "arguments" not in r: - r["arguments"] = save( - self.arguments, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - 
old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="arguments", - val=r.get("arguments"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.stdin is not None and "stdin" not in r: - r["stdin"] = save( - self.stdin, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stdin", - val=r.get("stdin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.stderr is not None and "stderr" not in r: - r["stderr"] = save( - self.stderr, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stderr", - val=r.get("stderr"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.stdout is not None and "stdout" not in r: - r["stdout"] = save( - self.stdout, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stdout", - val=r.get("stdout"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.successCodes is not None and "successCodes" not in r: - r["successCodes"] = save( - self.successCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="successCodes", - val=r.get("successCodes"), - 
cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: - r["temporaryFailCodes"] = save( - self.temporaryFailCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="temporaryFailCodes", - val=r.get("temporaryFailCodes"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.permanentFailCodes is not None and "permanentFailCodes" not in r: - r["permanentFailCodes"] = save( - self.permanentFailCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="permanentFailCodes", - val=r.get("permanentFailCodes"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "class", - "baseCommand", - "arguments", - "stdin", - "stderr", - "stdout", - "successCodes", - "temporaryFailCodes", - "permanentFailCodes", - ] - ) - - -class DockerRequirement(ProcessRequirement): - """ - Indicates that a workflow component should be run in a - [Docker](http://docker.com) or Docker-compatible (such as - [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and - specifies how 
to fetch or build the image. - - If a CommandLineTool lists `DockerRequirement` under - `hints` (or `requirements`), it may (or must) be run in the specified Docker - container. - - The platform must first acquire or install the correct Docker image as - specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. - - The platform must execute the tool in the container using `docker run` with - the appropriate Docker image and tool command line. - - The workflow platform may provide input files and the designated output - directory through the use of volume bind mounts. The platform should rewrite - file paths in the input object to correspond to the Docker bind mounted - locations. That is, the platform should rewrite values in the parameter context - such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths - within the container. The platform must ensure that `runtime.outdir` and - `runtime.tmpdir` are distinct directories. - - When running a tool contained in Docker, the workflow platform must not - assume anything about the contents of the Docker container, such as the - presence or absence of specific software, except to assume that the - generated command line represents a valid command within the runtime - environment of the container. - - A container image may specify an - [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) - and/or - [CMD](https://docs.docker.com/engine/reference/builder/#cmd). - Command line arguments will be appended after all elements of - ENTRYPOINT, and will override all elements specified using CMD (in - other words, CMD is only used when the CommandLineTool definition - produces an empty command line). - - Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility - concerns of the implicit hidden execution point (For further discussion, see - [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). 
Portable - CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. - CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the - `requirements` section. - - ## Interaction with other requirements - - If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a - DockerRequirement, the environment variables must be provided to Docker - using `--env` or `--env-file` and interact with the container's preexisting - environment as defined by Docker. - - """ - - def __init__( - self, - dockerPull: Optional[Any] = None, - dockerLoad: Optional[Any] = None, - dockerFile: Optional[Any] = None, - dockerImport: Optional[Any] = None, - dockerImageId: Optional[Any] = None, - dockerOutputDirectory: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "DockerRequirement" - self.dockerPull = dockerPull - self.dockerLoad = dockerLoad - self.dockerFile = dockerFile - self.dockerImport = dockerImport - self.dockerImageId = dockerImageId - self.dockerOutputDirectory = dockerOutputDirectory - - def __eq__(self, other: Any) -> bool: - if isinstance(other, DockerRequirement): - return bool( - self.class_ == other.class_ - and self.dockerPull == other.dockerPull - and self.dockerLoad == other.dockerLoad - and self.dockerFile == other.dockerFile - and self.dockerImport == other.dockerImport - and self.dockerImageId == other.dockerImageId - and self.dockerOutputDirectory == other.dockerOutputDirectory - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.dockerPull, - self.dockerLoad, - self.dockerFile, - self.dockerImport, - self.dockerImageId, - 
self.dockerOutputDirectory, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "DockerRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "DockerRequirement": - raise ValidationException("Not a DockerRequirement") - - if "dockerPull" in _doc: - try: - dockerPull = load_field( - _doc.get("dockerPull"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerPull' field is not valid because:", - SourceLine(_doc, "dockerPull", str), - [e], - ) - ) - else: - dockerPull = None - if "dockerLoad" in _doc: - try: - dockerLoad = load_field( - _doc.get("dockerLoad"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerLoad' field is not valid because:", - SourceLine(_doc, "dockerLoad", str), - [e], - ) - ) - else: - dockerLoad = None - if "dockerFile" in _doc: - try: - dockerFile = load_field( - _doc.get("dockerFile"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerFile' field is not valid because:", - SourceLine(_doc, "dockerFile", str), - [e], - ) - ) - else: - dockerFile = None - if "dockerImport" in _doc: - try: - dockerImport = load_field( - _doc.get("dockerImport"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerImport' field is not valid because:", - SourceLine(_doc, "dockerImport", str), - [e], - ) - ) - else: - dockerImport = None - if "dockerImageId" in _doc: - try: - dockerImageId = load_field( - _doc.get("dockerImageId"), - 
union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerImageId' field is not valid because:", - SourceLine(_doc, "dockerImageId", str), - [e], - ) - ) - else: - dockerImageId = None - if "dockerOutputDirectory" in _doc: - try: - dockerOutputDirectory = load_field( - _doc.get("dockerOutputDirectory"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerOutputDirectory' field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), - [e], - ) - ) - else: - dockerOutputDirectory = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'DockerRequirement'", None, _errors__) - _constructed = cls( - dockerPull=dockerPull, - dockerLoad=dockerLoad, - dockerFile=dockerFile, - dockerImport=dockerImport, - dockerImageId=dockerImageId, - dockerOutputDirectory=dockerOutputDirectory, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is 
not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "DockerRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.dockerPull is not None and "dockerPull" not in r: - r["dockerPull"] = save( - self.dockerPull, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerPull", - val=r.get("dockerPull"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerLoad is not None and "dockerLoad" not in r: - r["dockerLoad"] = save( - self.dockerLoad, - top=False, - base_url=base_url, - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerLoad", - val=r.get("dockerLoad"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerFile is not None and "dockerFile" not in r: - r["dockerFile"] = save( - self.dockerFile, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerFile", - val=r.get("dockerFile"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerImport is not None and "dockerImport" not in r: - r["dockerImport"] = save( - self.dockerImport, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerImport", - val=r.get("dockerImport"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerImageId is not None and "dockerImageId" not in r: - r["dockerImageId"] = save( - self.dockerImageId, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerImageId", - val=r.get("dockerImageId"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: - r["dockerOutputDirectory"] = save( - self.dockerOutputDirectory, - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerOutputDirectory", - val=r.get("dockerOutputDirectory"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "dockerPull", - "dockerLoad", - "dockerFile", - "dockerImport", - "dockerImageId", - "dockerOutputDirectory", - ] - ) - - -class SoftwareRequirement(ProcessRequirement): - """ - A list of software packages that should be configured in the environment of - the defined process. - - """ - - def __init__( - self, - packages: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SoftwareRequirement" - self.packages = packages - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwareRequirement): - return bool(self.class_ == other.class_ and self.packages == other.packages) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.packages)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SoftwareRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SoftwareRequirement": - raise 
ValidationException("Not a SoftwareRequirement") - - try: - packages = load_field( - _doc.get("packages"), - idmap_packages_array_of_SoftwarePackageLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'packages' field is not valid because:", - SourceLine(_doc, "packages", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SoftwareRequirement'", None, _errors__) - _constructed = cls( - packages=packages, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SoftwareRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 
'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.packages is not None and "packages" not in r: - r["packages"] = save( - self.packages, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="packages", - val=r.get("packages"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "packages"]) - - -class SoftwarePackage(Saveable): - def __init__( - self, - package: Any, - version: Optional[Any] = None, - specs: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.package 
= package - self.version = version - self.specs = specs - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwarePackage): - return bool( - self.package == other.package - and self.version == other.version - and self.specs == other.specs - ) - return False - - def __hash__(self) -> int: - return hash((self.package, self.version, self.specs)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SoftwarePackage": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - package = load_field( - _doc.get("package"), - strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'package' field is not valid because:", - SourceLine(_doc, "package", str), - [e], - ) - ) - if "version" in _doc: - try: - version = load_field( - _doc.get("version"), - union_of_None_type_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'version' field is not valid because:", - SourceLine(_doc, "version", str), - [e], - ) - ) - else: - version = None - if "specs" in _doc: - try: - specs = load_field( - _doc.get("specs"), - uri_union_of_None_type_or_array_of_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'specs' field is not valid because:", - SourceLine(_doc, "specs", str), - [e], - ) - ) - else: - specs = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( - k - 
), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SoftwarePackage'", None, _errors__) - _constructed = cls( - package=package, - version=version, - specs=specs, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift - ) - if self.package is not None and "package" not in r: - r["package"] = save( - self.package, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="package", - val=r.get("package"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.version is not None and "version" not in r: - r["version"] = save( - self.version, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="version", - val=r.get("version"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.specs is not None and "specs" not in r: - u = save_relative_uri(self.specs, base_url, False, None, relative_uris) - r["specs"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="specs", - val=r.get("specs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["package", "version", "specs"]) - - -class Dirent(Saveable): - """ - Define a file or subdirectory that must be placed in the designated output - directory prior to executing the command line tool. May be the result of - executing an expression, such as building a configuration file from a - template. 
- - """ - - def __init__( - self, - entry: Any, - entryname: Optional[Any] = None, - writable: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.entryname = entryname - self.entry = entry - self.writable = writable - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Dirent): - return bool( - self.entryname == other.entryname - and self.entry == other.entry - and self.writable == other.writable - ) - return False - - def __hash__(self) -> int: - return hash((self.entryname, self.entry, self.writable)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Dirent": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "entryname" in _doc: - try: - entryname = load_field( - _doc.get("entryname"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'entryname' field is not valid because:", - SourceLine(_doc, "entryname", str), - [e], - ) - ) - else: - entryname = None - try: - entry = load_field( - _doc.get("entry"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'entry' field is not valid because:", - SourceLine(_doc, "entry", str), - [e], - ) - ) - if "writable" in _doc: - try: - writable = load_field( - _doc.get("writable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( 
- ValidationException( - "the 'writable' field is not valid because:", - SourceLine(_doc, "writable", str), - [e], - ) - ) - else: - writable = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Dirent'", None, _errors__) - _constructed = cls( - entryname=entryname, - entry=entry, - writable=writable, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.entryname is not None and "entryname" not in r: - r["entryname"] = save( - self.entryname, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="entryname", - val=r.get("entryname"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.entry is not None and "entry" not in r: - r["entry"] = save( - self.entry, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="entry", - val=r.get("entry"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.writable is not None and "writable" not in r: - r["writable"] = save( - self.writable, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="writable", - val=r.get("writable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: 
- if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["entryname", "entry", "writable"]) - - -class InitialWorkDirRequirement(ProcessRequirement): - """ - Define a list of files and subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool. - """ - - def __init__( - self, - listing: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InitialWorkDirRequirement" - self.listing = listing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InitialWorkDirRequirement): - return bool(self.class_ == other.class_ and self.listing == other.listing) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.listing)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InitialWorkDirRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "InitialWorkDirRequirement": - raise ValidationException("Not a InitialWorkDirRequirement") - - try: - listing = load_field( - _doc.get("listing"), - union_of_array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'listing' field is not valid 
because:", - SourceLine(_doc, "listing", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'InitialWorkDirRequirement'", None, _errors__ - ) - _constructed = cls( - listing=listing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "InitialWorkDirRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - 
inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.listing is not None and "listing" not in r: - r["listing"] = save( - self.listing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="listing", - val=r.get("listing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "listing"]) - - -class EnvVarRequirement(ProcessRequirement): - """ - Define a list of environment variables which will be set in the - execution environment of the tool. See `EnvironmentDef` for details. 
- - """ - - def __init__( - self, - envDef: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "EnvVarRequirement" - self.envDef = envDef - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvVarRequirement): - return bool(self.class_ == other.class_ and self.envDef == other.envDef) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.envDef)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "EnvVarRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "EnvVarRequirement": - raise ValidationException("Not a EnvVarRequirement") - - try: - envDef = load_field( - _doc.get("envDef"), - idmap_envDef_array_of_EnvironmentDefLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'envDef' field is not valid because:", - SourceLine(_doc, "envDef", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'EnvVarRequirement'", None, _errors__) - _constructed = cls( - envDef=envDef, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) 
- return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "EnvVarRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.envDef is not None and "envDef" not in r: - r["envDef"] = save( - self.envDef, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - 
max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="envDef", - val=r.get("envDef"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "envDef"]) - - -class ShellCommandRequirement(ProcessRequirement): - """ - Modify the behavior of CommandLineTool to generate a single string - containing a shell command line. Each item in the argument list must be - joined into a string separated by single spaces and quoted to prevent - intepretation by the shell, unless `CommandLineBinding` for that argument - contains `shellQuote: false`. If `shellQuote: false` is specified, the - argument is joined into the command string without quoting, which allows - the use of shell metacharacters such as `|` for pipes. 
- - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ShellCommandRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ShellCommandRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ShellCommandRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ShellCommandRequirement": - raise ValidationException("Not a ShellCommandRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'ShellCommandRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if 
inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ShellCommandRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ResourceRequirement(ProcessRequirement): - """ - Specify basic hardware resource requirements. - - "min" is the minimum amount of a resource that must be reserved to schedule - a job. If "min" cannot be satisfied, the job should not be run. 
- - "max" is the maximum amount of a resource that the job shall be permitted - to use. If a node has sufficient resources, multiple jobs may be scheduled - on a single node provided each job's "max" resource requirements are - met. If a job attempts to exceed its "max" resource allocation, an - implementation may deny additional resources, which may result in job - failure. - - If "min" is specified but "max" is not, then "max" == "min" - If "max" is specified by "min" is not, then "min" == "max". - - It is an error if max < min. - - It is an error if the value of any of these fields is negative. - - If neither "min" nor "max" is specified for a resource, use the default values below. - - """ - - def __init__( - self, - coresMin: Optional[Any] = None, - coresMax: Optional[Any] = None, - ramMin: Optional[Any] = None, - ramMax: Optional[Any] = None, - tmpdirMin: Optional[Any] = None, - tmpdirMax: Optional[Any] = None, - outdirMin: Optional[Any] = None, - outdirMax: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ResourceRequirement" - self.coresMin = coresMin - self.coresMax = coresMax - self.ramMin = ramMin - self.ramMax = ramMax - self.tmpdirMin = tmpdirMin - self.tmpdirMax = tmpdirMax - self.outdirMin = outdirMin - self.outdirMax = outdirMax - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ResourceRequirement): - return bool( - self.class_ == other.class_ - and self.coresMin == other.coresMin - and self.coresMax == other.coresMax - and self.ramMin == other.ramMin - and self.ramMax == other.ramMax - and self.tmpdirMin == other.tmpdirMin - and self.tmpdirMax == other.tmpdirMax - and self.outdirMin == other.outdirMin - and 
self.outdirMax == other.outdirMax - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.coresMin, - self.coresMax, - self.ramMin, - self.ramMax, - self.tmpdirMin, - self.tmpdirMax, - self.outdirMin, - self.outdirMax, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ResourceRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ResourceRequirement": - raise ValidationException("Not a ResourceRequirement") - - if "coresMin" in _doc: - try: - coresMin = load_field( - _doc.get("coresMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'coresMin' field is not valid because:", - SourceLine(_doc, "coresMin", str), - [e], - ) - ) - else: - coresMin = None - if "coresMax" in _doc: - try: - coresMax = load_field( - _doc.get("coresMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'coresMax' field is not valid because:", - SourceLine(_doc, "coresMax", str), - [e], - ) - ) - else: - coresMax = None - if "ramMin" in _doc: - try: - ramMin = load_field( - _doc.get("ramMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'ramMin' field is not valid because:", - SourceLine(_doc, "ramMin", str), - [e], - ) - ) - else: - ramMin = None - if "ramMax" in _doc: - try: - ramMax = load_field( - _doc.get("ramMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'ramMax' field is not valid because:", - SourceLine(_doc, "ramMax", str), - [e], - ) - ) - else: - ramMax = None - if "tmpdirMin" in _doc: - try: - tmpdirMin = load_field( - _doc.get("tmpdirMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'tmpdirMin' field is not valid because:", - SourceLine(_doc, "tmpdirMin", str), - [e], - ) - ) - else: - tmpdirMin = None - if "tmpdirMax" in _doc: - try: - tmpdirMax = load_field( - _doc.get("tmpdirMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'tmpdirMax' field is not valid because:", - SourceLine(_doc, "tmpdirMax", str), - [e], - ) - ) - else: - tmpdirMax = None - if "outdirMin" in _doc: - try: - outdirMin = load_field( - _doc.get("outdirMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outdirMin' field is not valid because:", - SourceLine(_doc, "outdirMin", str), - [e], - ) - ) - else: - outdirMin = None - if "outdirMax" in _doc: - try: - outdirMax = load_field( - _doc.get("outdirMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outdirMax' field is not valid because:", - SourceLine(_doc, "outdirMax", str), - [e], - ) - ) - else: - outdirMax = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, 
`ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ResourceRequirement'", None, _errors__) - _constructed = cls( - coresMin=coresMin, - coresMax=coresMax, - ramMin=ramMin, - ramMax=ramMax, - tmpdirMin=tmpdirMin, - tmpdirMax=tmpdirMax, - outdirMin=outdirMin, - outdirMax=outdirMax, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ResourceRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = 
saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.coresMin is not None and "coresMin" not in r: - r["coresMin"] = save( - self.coresMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="coresMin", - val=r.get("coresMin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.coresMax is not None and "coresMax" not in r: - r["coresMax"] = save( - self.coresMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="coresMax", - val=r.get("coresMax"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.ramMin is not None and "ramMin" not in r: - r["ramMin"] = save( - self.ramMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="ramMin", - val=r.get("ramMin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.ramMax is not None and "ramMax" not in r: - r["ramMax"] = save( - self.ramMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - 
key="ramMax", - val=r.get("ramMax"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.tmpdirMin is not None and "tmpdirMin" not in r: - r["tmpdirMin"] = save( - self.tmpdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="tmpdirMin", - val=r.get("tmpdirMin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.tmpdirMax is not None and "tmpdirMax" not in r: - r["tmpdirMax"] = save( - self.tmpdirMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="tmpdirMax", - val=r.get("tmpdirMax"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outdirMin is not None and "outdirMin" not in r: - r["outdirMin"] = save( - self.outdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outdirMin", - val=r.get("outdirMin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outdirMax is not None and "outdirMax" not in r: - r["outdirMax"] = save( - self.outdirMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outdirMax", - val=r.get("outdirMax"), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "coresMin", - "coresMax", - "ramMin", - "ramMax", - "tmpdirMin", - "tmpdirMax", - "outdirMin", - "outdirMax", - ] - ) - - -class WorkReuse(ProcessRequirement): - """ - For implementations that support reusing output from past work (on - the assumption that same code and same input produce same - results), control whether to enable or disable the reuse behavior - for a particular tool or step (to accomodate situations where that - assumption is incorrect). A reused step is not executed but - instead returns the same output as the original execution. - - If `enableReuse` is not specified, correct tools should assume it - is enabled by default. - - """ - - def __init__( - self, - enableReuse: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "WorkReuse" - self.enableReuse = enableReuse - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkReuse): - return bool( - self.class_ == other.class_ and self.enableReuse == other.enableReuse - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.enableReuse)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkReuse": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = 
[] - - if _doc.get("class") != "WorkReuse": - raise ValidationException("Not a WorkReuse") - - try: - enableReuse = load_field( - _doc.get("enableReuse"), - union_of_booltype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'enableReuse' field is not valid because:", - SourceLine(_doc, "enableReuse", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkReuse'", None, _errors__) - _constructed = cls( - enableReuse=enableReuse, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "WorkReuse" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if 
getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.enableReuse is not None and "enableReuse" not in r: - r["enableReuse"] = save( - self.enableReuse, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="enableReuse", - val=r.get("enableReuse"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "enableReuse"]) - - -class NetworkAccess(ProcessRequirement): - """ - Indicate whether a process requires outgoing IPv4/IPv6 network - access. Choice of IPv4 or IPv6 is implementation and site - specific, correct tools must support both. - - If `networkAccess` is false or not specified, tools must not - assume network access, except for localhost (the loopback device). 
- - If `networkAccess` is true, the tool must be able to make outgoing - connections to network resources. Resources may be on a private - subnet or the public Internet. However, implementations and sites - may apply their own security policies to restrict what is - accessible by the tool. - - Enabling network access does not imply a publically routable IP - address or the ability to accept inbound connections. - - """ - - def __init__( - self, - networkAccess: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "NetworkAccess" - self.networkAccess = networkAccess - - def __eq__(self, other: Any) -> bool: - if isinstance(other, NetworkAccess): - return bool( - self.class_ == other.class_ - and self.networkAccess == other.networkAccess - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.networkAccess)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "NetworkAccess": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "NetworkAccess": - raise ValidationException("Not a NetworkAccess") - - try: - networkAccess = load_field( - _doc.get("networkAccess"), - union_of_booltype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'networkAccess' field is not valid because:", - SourceLine(_doc, "networkAccess", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, 
"", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `networkAccess`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'NetworkAccess'", None, _errors__) - _constructed = cls( - networkAccess=networkAccess, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "NetworkAccess" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - 
- r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.networkAccess is not None and "networkAccess" not in r: - r["networkAccess"] = save( - self.networkAccess, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="networkAccess", - val=r.get("networkAccess"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "networkAccess"]) - - -class InplaceUpdateRequirement(ProcessRequirement): - """ - - If `inplaceUpdate` is true, then an implementation supporting this - feature may permit tools to directly update files with `writable: - true` in InitialWorkDirRequirement. That is, as an optimization, - files may be destructively modified in place as opposed to copied - and updated. - - An implementation must ensure that only one workflow step may - access a writable file at a time. It is an error if a file which - is writable by one workflow step file is accessed (for reading or - writing) by any other workflow step running independently. - However, a file which has been updated in a previous completed - step may be used as input to multiple steps, provided it is - read-only in every step. - - Workflow steps which modify a file must produce the modified file - as output. 
Downstream steps which futher process the file must - use the output of previous steps, and not refer to a common input - (this is necessary for both ordering and correctness). - - Workflow authors should provide this in the `hints` section. The - intent of this feature is that workflows produce the same results - whether or not InplaceUpdateRequirement is supported by the - implementation, and this feature is primarily available as an - optimization for particular environments. - - Users and implementers should be aware that workflows that - destructively modify inputs may not be repeatable or reproducible. - In particular, enabling this feature implies that WorkReuse should - not be enabled. - - """ - - def __init__( - self, - inplaceUpdate: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InplaceUpdateRequirement" - self.inplaceUpdate = inplaceUpdate - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InplaceUpdateRequirement): - return bool( - self.class_ == other.class_ - and self.inplaceUpdate == other.inplaceUpdate - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.inplaceUpdate)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InplaceUpdateRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "InplaceUpdateRequirement": - raise ValidationException("Not a InplaceUpdateRequirement") - - try: - inplaceUpdate = load_field( - _doc.get("inplaceUpdate"), - booltype, - baseuri, - loadingOptions, - ) - 
except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inplaceUpdate' field is not valid because:", - SourceLine(_doc, "inplaceUpdate", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'InplaceUpdateRequirement'", None, _errors__ - ) - _constructed = cls( - inplaceUpdate=inplaceUpdate, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "InplaceUpdateRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( 
- getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.inplaceUpdate is not None and "inplaceUpdate" not in r: - r["inplaceUpdate"] = save( - self.inplaceUpdate, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inplaceUpdate", - val=r.get("inplaceUpdate"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "inplaceUpdate"]) - - -class ToolTimeLimit(ProcessRequirement): - """ - Set an upper limit on the execution time of a CommandLineTool. - A CommandLineTool whose execution duration exceeds the time - limit may be preemptively terminated and considered failed. - May also be used by batch systems to make scheduling decisions. - The execution duration excludes external operations, such as - staging of files, pulling a docker image etc, and only counts - wall-time for the execution of the command line itself. 
- - """ - - def __init__( - self, - timelimit: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ToolTimeLimit" - self.timelimit = timelimit - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ToolTimeLimit): - return bool( - self.class_ == other.class_ and self.timelimit == other.timelimit - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.timelimit)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ToolTimeLimit": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ToolTimeLimit": - raise ValidationException("Not a ToolTimeLimit") - - try: - timelimit = load_field( - _doc.get("timelimit"), - union_of_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'timelimit' field is not valid because:", - SourceLine(_doc, "timelimit", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ToolTimeLimit'", None, _errors__) - _constructed = cls( - timelimit=timelimit, - extension_fields=extension_fields, - 
loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ToolTimeLimit" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.timelimit is not None and "timelimit" not in r: - r["timelimit"] = save( - self.timelimit, - top=False, - base_url=base_url, - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="timelimit", - val=r.get("timelimit"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "timelimit"]) - - -class ExpressionToolOutputParameter(OutputParameter): - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionToolOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - 
loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ExpressionToolOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - 
union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'ExpressionToolOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return 
_constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - 
base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] - ) - - -class WorkflowInputParameter(InputParameter): - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: 
Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default - self.type = type - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default - and self.type == other.type - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.loadContents, - self.loadListing, - self.default, - self.type, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowInputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], 
- ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 
'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_InputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( 
- "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'WorkflowInputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, - type=type, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = 
{} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, 
- shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - 
base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "loadContents", - "loadListing", - "default", - "type", - "inputBinding", - ] - ) - - -class ExpressionTool(Process): - """ - An ExpressionTool is a type of Process object that can be run by itself - or as a Workflow step. It executes a pure Javascript expression that has - access to the same input parameters as a workflow. It is meant to be used - sparingly as a way to isolate complex Javascript expressions that need to - operate on input data and produce some result; perhaps just a - rearrangement of the inputs. No Docker software container is required - or allowed. 
- - """ - - def __init__( - self, - inputs: Any, - outputs: Any, - expression: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.class_ = "ExpressionTool" - self.expression = expression - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionTool): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.expression == other.expression - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.class_, - self.expression, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ExpressionTool": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ExpressionTool": - raise ValidationException("Not a ExpressionTool") - - if "id" in _doc: - try: - id = load_field( - 
_doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_WorkflowInputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputs' field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - ) - ) - try: - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_ExpressionToolOutputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputs' field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - 
"the 'cwlVersion' field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - ) - ) - else: - cwlVersion = None - try: - expression = load_field( - _doc.get("expression"), - ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'expression' field is not valid because:", - SourceLine(_doc, "expression", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `expression`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ExpressionTool'", None, _errors__) - _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - expression=expression, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not 
None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ExpressionTool" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - 
max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - 
val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.expression is not None and "expression" not in r: - r["expression"] = save( - self.expression, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="expression", - val=r.get("expression"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "class", - "expression", - ] - ) - - -class WorkflowOutputParameter(OutputParameter): - """ - Describe an output parameter of a 
workflow. The parameter must be - connected to one or more parameters defined in the workflow that - will provide the value of the output parameter. It is legal to - connect a WorkflowInputParameter to a WorkflowOutputParameter. - - """ - - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - outputSource: Optional[Any] = None, - linkMerge: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.outputSource = outputSource - self.linkMerge = linkMerge - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.outputSource == other.outputSource - and self.linkMerge == other.linkMerge - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.outputSource, - self.linkMerge, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - 
_doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 
'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "outputSource" in _doc: - try: - outputSource = load_field( - _doc.get("outputSource"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputSource' field is not valid because:", - SourceLine(_doc, "outputSource", str), - [e], - ) - ) - else: - outputSource = None - if "linkMerge" in _doc: - try: - linkMerge = load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'linkMerge' field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [e], - ) - ) - else: - linkMerge = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: 
- _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `outputSource`, `linkMerge`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'WorkflowOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - outputSource=outputSource, - linkMerge=linkMerge, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - 
line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri( - self.outputSource, str(self.id), False, 1, relative_uris - ) - r["outputSource"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputSource", - val=r.get("outputSource"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.linkMerge is not None and "linkMerge" not in r: - r["linkMerge"] = save( - self.linkMerge, - top=False, - 
base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="linkMerge", - val=r.get("linkMerge"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "outputSource", - "linkMerge", - "type", - ] - ) - - -class Sink(Saveable): - pass - - -class WorkflowStepInput(Identified, Sink, LoadContents, Labeled): - """ - The input of a workflow step connects an upstream parameter (from the - workflow inputs, or the outputs of other workflows steps) with the input - parameters of the process specified by the `run` field. Only input parameters - declared by the target process will be passed through at runtime to the process - though additonal parameters may be specified (for use within `valueFrom` - expressions for instance) - unconnected or unused parameters do not represent an - error condition. - - ## Input object - - A WorkflowStepInput object must contain an `id` field in the form - `#fieldname` or `#prefix/fieldname`. 
When the `id` field contains a slash - `/` the field name consists of the characters following the final slash - (the prefix portion may contain one or more slashes to indicate scope). - This defines a field of the workflow step input object with the value of - the `source` parameter(s). - - ## Merging - - To merge multiple inbound data links, - [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - If the sink parameter is an array, or named in a [workflow - scatter](#WorkflowStep) operation, there may be multiple inbound data links - listed in the `source` field. The values from the input links are merged - depending on the method specified in the `linkMerge` field. If not - specified, the default method is "merge_nested". - - * **merge_nested** - - The input must be an array consisting of exactly one entry for each - input link. If "merge_nested" is specified with a single link, the value - from the link must be wrapped in a single-item list. - - * **merge_flattened** - - 1. The source and sink parameters must be compatible types, or the source - type must be compatible with single element from the "items" type of - the destination array parameter. - 2. Source parameters which are arrays are concatenated. - Source parameters which are single element types are appended as - single elements. 
- - """ - - def __init__( - self, - id: Optional[Any] = None, - source: Optional[Any] = None, - linkMerge: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - label: Optional[Any] = None, - default: Optional[Any] = None, - valueFrom: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.source = source - self.linkMerge = linkMerge - self.loadContents = loadContents - self.loadListing = loadListing - self.label = label - self.default = default - self.valueFrom = valueFrom - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepInput): - return bool( - self.id == other.id - and self.source == other.source - and self.linkMerge == other.linkMerge - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.label == other.label - and self.default == other.default - and self.valueFrom == other.valueFrom - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.source, - self.linkMerge, - self.loadContents, - self.loadListing, - self.label, - self.default, - self.valueFrom, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStepInput": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field 
is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "source" in _doc: - try: - source = load_field( - _doc.get("source"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'source' field is not valid because:", - SourceLine(_doc, "source", str), - [e], - ) - ) - else: - source = None - if "linkMerge" in _doc: - try: - linkMerge = load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'linkMerge' field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [e], - ) - ) - else: - linkMerge = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( 
- "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - if "valueFrom" in _doc: - try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'valueFrom' field is not valid because:", - SourceLine(_doc, "valueFrom", str), - [e], - ) - ) - else: - valueFrom = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__) - _constructed = cls( - id=id, - source=source, - linkMerge=linkMerge, - loadContents=loadContents, - loadListing=loadListing, - label=label, - default=default, - valueFrom=valueFrom, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if 
keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - 
old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.source is not None and "source" not in r: - u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) - r["source"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="source", - val=r.get("source"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.linkMerge is not None and "linkMerge" not in r: - r["linkMerge"] = save( - self.linkMerge, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="linkMerge", - val=r.get("linkMerge"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.valueFrom is not None and "valueFrom" not in r: - r["valueFrom"] = save( - self.valueFrom, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="valueFrom", - val=r.get("valueFrom"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "source", - "linkMerge", - "loadContents", - "loadListing", - "label", - "default", - "valueFrom", - ] - ) - - -class WorkflowStepOutput(Identified): - """ - Associate an output parameter of the underlying process with a workflow - parameter. 
The workflow parameter (given in the `id` field) be may be used - as a `source` to connect with input parameters of other workflow steps, or - with an output parameter of the process. - - A unique identifier for this workflow output parameter. This is - the identifier to use in the `source` field of `WorkflowStepInput` - to connect the output value to downstream parameters. - - """ - - def __init__( - self, - id: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepOutput): - return bool(self.id == other.id) - return False - - def __hash__(self) -> int: - return hash((self.id)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStepOutput": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - 
extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__) - _constructed = cls( - id=id, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - 
keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["id"]) - - -class WorkflowStep(Identified, Labeled, Documented): - """ - A workflow step is an executable element of a workflow. It specifies the - underlying process implementation (such as `CommandLineTool` or another - `Workflow`) in the `run` field and connects the input and output parameters - of the underlying process to workflow parameters. - - # Scatter/gather - - To use scatter/gather, - [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - A "scatter" operation specifies that the associated workflow step or - subworkflow should execute separately over a list of input elements. Each - job making up a scatter operation is independent and may be executed - concurrently. - - The `scatter` field specifies one or more input parameters which will be - scattered. 
An input parameter may be listed more than once. The declared - type of each input parameter is implicitly becomes an array of items of the - input parameter type. If a parameter is listed more than once, it becomes - a nested array. As a result, upstream parameters which are connected to - scattered parameters must be arrays. - - All output parameter types are also implicitly wrapped in arrays. Each job - in the scatter results in an entry in the output array. - - If any scattered parameter runtime value is an empty array, all outputs are - set to empty arrays and no work is done for the step, according to - applicable scattering rules. - - If `scatter` declares more than one input parameter, `scatterMethod` - describes how to decompose the input into a discrete set of jobs. - - * **dotproduct** specifies that each of the input arrays are aligned and one - element taken from each array to construct each job. It is an error - if all input arrays are not the same length. - - * **nested_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output must be nested arrays for each level of scattering, in the - order that the input arrays are listed in the `scatter` field. - - * **flat_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output arrays must be flattened to a single level, but otherwise listed in the - order that the input arrays are listed in the `scatter` field. - - # Subworkflows - - To specify a nested workflow as part of a workflow step, - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be - specified in the workflow or workflow step requirements. - - It is a fatal error if a workflow directly or indirectly invokes itself as - a subworkflow (recursive workflows are not allowed). 
- - """ - - def __init__( - self, - in_: Any, - out: Any, - run: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - scatter: Optional[Any] = None, - scatterMethod: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.label = label - self.doc = doc - self.in_ = in_ - self.out = out - self.requirements = requirements - self.hints = hints - self.run = run - self.scatter = scatter - self.scatterMethod = scatterMethod - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStep): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.in_ == other.in_ - and self.out == other.out - and self.requirements == other.requirements - and self.hints == other.hints - and self.run == other.run - and self.scatter == other.scatter - and self.scatterMethod == other.scatterMethod - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.in_, - self.out, - self.requirements, - self.hints, - self.run, - self.scatter, - self.scatterMethod, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStep": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - 
_errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - in_ = load_field( - _doc.get("in"), - idmap_in__array_of_WorkflowStepInputLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'in' field is not valid because:", - SourceLine(_doc, "in", str), - [e], - ) - ) - try: - out = load_field( - _doc.get("out"), - uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'out' field is not valid because:", - SourceLine(_doc, "out", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) - try: - run = load_field( - _doc.get("run"), - uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None, - subscope_baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'run' field is not valid because:", - SourceLine(_doc, "run", str), - [e], - ) - ) - if "scatter" in _doc: - try: - scatter = load_field( - _doc.get("scatter"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'scatter' field is not valid because:", - SourceLine(_doc, "scatter", str), - [e], - ) - ) - else: - 
scatter = None - if "scatterMethod" in _doc: - try: - scatterMethod = load_field( - _doc.get("scatterMethod"), - uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'scatterMethod' field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [e], - ) - ) - else: - scatterMethod = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `scatter`, `scatterMethod`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkflowStep'", None, _errors__) - _constructed = cls( - id=id, - label=label, - doc=doc, - in_=in_, - out=out, - requirements=requirements, - hints=hints, - run=run, - scatter=scatter, - scatterMethod=scatterMethod, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - 
r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - 
old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.in_ is not None and "in" not in r: - r["in"] = save( - self.in_, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="in", - val=r.get("in"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.out is not None and "out" not in r: - u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) - r["out"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="out", - val=r.get("out"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None 
and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.run is not None and "run" not in r: - u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) - r["run"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="run", - val=r.get("run"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.scatter is not None and "scatter" not in r: - u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) - r["scatter"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="scatter", - val=r.get("scatter"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri( - self.scatterMethod, str(self.id), False, None, relative_uris - ) - r["scatterMethod"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="scatterMethod", - val=r.get("scatterMethod"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "in", - "out", - "requirements", - "hints", - "run", - 
"scatter", - "scatterMethod", - ] - ) - - -class Workflow(Process): - """ - A workflow describes a set of **steps** and the **dependencies** between - those steps. When a step produces output that will be consumed by a - second step, the first step is a dependency of the second step. - - When there is a dependency, the workflow engine must execute the preceding - step and wait for it to successfully produce output before executing the - dependent step. If two steps are defined in the workflow graph that - are not directly or indirectly dependent, these steps are **independent**, - and may execute in any order or execute concurrently. A workflow is - complete when all steps have been executed. - - Dependencies between parameters are expressed using the `source` field on - [workflow step input parameters](#WorkflowStepInput) and [workflow output - parameters](#WorkflowOutputParameter). - - The `source` field expresses the dependency of one parameter on another - such that when a value is associated with the parameter specified by - `source`, that value is propagated to the destination parameter. When all - data links inbound to a given step are fufilled, the step is ready to - execute. - - ## Workflow success and failure - - A completed step must result in one of `success`, `temporaryFailure` or - `permanentFailure` states. An implementation may choose to retry a step - execution which resulted in `temporaryFailure`. An implementation may - choose to either continue running other steps of a workflow, or terminate - immediately upon `permanentFailure`. - - * If any step of a workflow execution results in `permanentFailure`, then - the workflow status is `permanentFailure`. - - * If one or more steps result in `temporaryFailure` and all other steps - complete `success` or are not executed, then the workflow status is - `temporaryFailure`. - - * If all workflow steps are executed and complete with `success`, then the - workflow status is `success`. 
- - # Extensions - - [ScatterFeatureRequirement](#ScatterFeatureRequirement) and - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are - available as standard [extensions](#Extensions_and_Metadata) to core - workflow semantics. - - """ - - def __init__( - self, - inputs: Any, - outputs: Any, - steps: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.class_ = "Workflow" - self.steps = steps - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Workflow): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.steps == other.steps - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.class_, - self.steps, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Workflow": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - 
_doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "Workflow": - raise ValidationException("Not a Workflow") - - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_WorkflowInputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputs' field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - ) - ) - try: - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_WorkflowOutputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputs' field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - ) - ) - if "requirements" in _doc: - 
try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None, - baseuri, - loadingOptions, - ) - except 
ValidationException as e: - _errors__.append( - ValidationException( - "the 'cwlVersion' field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - ) - ) - else: - cwlVersion = None - try: - steps = load_field( - _doc.get("steps"), - idmap_steps_union_of_array_of_WorkflowStepLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'steps' field is not valid because:", - SourceLine(_doc, "steps", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `steps`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Workflow'", None, _errors__) - _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - steps=steps, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if 
isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "Workflow" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( 
- old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.steps is not None and "steps" not in r: - r["steps"] = save( - self.steps, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="steps", - val=r.get("steps"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "class", - "steps", - ] - ) - - -class 
SubworkflowFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support nested workflows in - the `run` field of [WorkflowStep](#WorkflowStep). - - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SubworkflowFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SubworkflowFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SubworkflowFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SubworkflowFeatureRequirement": - raise ValidationException("Not a SubworkflowFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'SubworkflowFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - 
inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SubworkflowFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ScatterFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform 
must support the `scatter` and - `scatterMethod` fields of [WorkflowStep](#WorkflowStep). - - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ScatterFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ScatterFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ScatterFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ScatterFeatureRequirement": - raise ValidationException("Not a ScatterFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'ScatterFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] 
- r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ScatterFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class MultipleInputFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support multiple inbound data links - listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). 
- - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "MultipleInputFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, MultipleInputFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "MultipleInputFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "MultipleInputFeatureRequirement": - raise ValidationException("Not a MultipleInputFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'MultipleInputFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc 
= iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "MultipleInputFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class StepInputExpressionRequirement(ProcessRequirement): - """ - Indicate that the workflow platform must support the `valueFrom` field - of [WorkflowStepInput](#WorkflowStepInput). 
- - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "StepInputExpressionRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, StepInputExpressionRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "StepInputExpressionRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "StepInputExpressionRequirement": - raise ValidationException("Not a StepInputExpressionRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'StepInputExpressionRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = 
iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "StepInputExpressionRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -_vocab = { - "Any": "https://w3id.org/cwl/salad#Any", - "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", - "CWLType": "https://w3id.org/cwl/cwl#CWLType", - "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", - "CommandInputArraySchema": 
"https://w3id.org/cwl/cwl#CommandInputArraySchema", - "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", - "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter", - "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", - "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", - "CommandInputSchema": "https://w3id.org/cwl/cwl#CommandInputSchema", - "CommandLineBindable": "https://w3id.org/cwl/cwl#CommandLineBindable", - "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", - "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", - "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", - "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", - "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema", - "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", - "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", - "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", - "Directory": "https://w3id.org/cwl/cwl#Directory", - "Dirent": "https://w3id.org/cwl/cwl#Dirent", - "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", - "Documented": "https://w3id.org/cwl/salad#Documented", - "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", - "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", - "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", - "Expression": "https://w3id.org/cwl/cwl#Expression", - "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", - "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", - "ExpressionToolOutputParameter": "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", - "FieldBase": "https://w3id.org/cwl/cwl#FieldBase", - "File": "https://w3id.org/cwl/cwl#File", - "IOSchema": "https://w3id.org/cwl/cwl#IOSchema", - 
"Identified": "https://w3id.org/cwl/cwl#Identified", - "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", - "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", - "InplaceUpdateRequirement": "https://w3id.org/cwl/cwl#InplaceUpdateRequirement", - "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", - "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", - "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", - "InputFormat": "https://w3id.org/cwl/cwl#InputFormat", - "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", - "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField", - "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", - "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", - "Labeled": "https://w3id.org/cwl/cwl#Labeled", - "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", - "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", - "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", - "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", - "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", - "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", - "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", - "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", - "OutputFormat": "https://w3id.org/cwl/cwl#OutputFormat", - "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", - "OutputRecordField": "https://w3id.org/cwl/cwl#OutputRecordField", - "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", - "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", - "Parameter": "https://w3id.org/cwl/cwl#Parameter", - "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", - "Process": "https://w3id.org/cwl/cwl#Process", - "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", - 
"RecordField": "https://w3id.org/cwl/salad#RecordField", - "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", - "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", - "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", - "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", - "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", - "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", - "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", - "Sink": "https://w3id.org/cwl/cwl#Sink", - "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", - "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", - "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", - "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", - "ToolTimeLimit": "https://w3id.org/cwl/cwl#ToolTimeLimit", - "WorkReuse": "https://w3id.org/cwl/cwl#WorkReuse", - "Workflow": "https://w3id.org/cwl/cwl#Workflow", - "WorkflowInputParameter": "https://w3id.org/cwl/cwl#WorkflowInputParameter", - "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", - "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", - "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", - "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", - "array": "https://w3id.org/cwl/salad#array", - "boolean": "http://www.w3.org/2001/XMLSchema#boolean", - "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", - "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", - "double": "http://www.w3.org/2001/XMLSchema#double", - "draft-2": "https://w3id.org/cwl/cwl#draft-2", - "draft-3": "https://w3id.org/cwl/cwl#draft-3", - "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", - "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", - "draft-3.dev3": 
"https://w3id.org/cwl/cwl#draft-3.dev3", - "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", - "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", - "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", - "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", - "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", - "enum": "https://w3id.org/cwl/salad#enum", - "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", - "float": "http://www.w3.org/2001/XMLSchema#float", - "int": "http://www.w3.org/2001/XMLSchema#int", - "long": "http://www.w3.org/2001/XMLSchema#long", - "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", - "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", - "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", - "no_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing", - "null": "https://w3id.org/cwl/salad#null", - "record": "https://w3id.org/cwl/salad#record", - "shallow_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing", - "stderr": "https://w3id.org/cwl/cwl#stderr", - "stdin": "https://w3id.org/cwl/cwl#stdin", - "stdout": "https://w3id.org/cwl/cwl#stdout", - "string": "http://www.w3.org/2001/XMLSchema#string", - "v1.0": "https://w3id.org/cwl/cwl#v1.0", - "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", - "v1.1": "https://w3id.org/cwl/cwl#v1.1", - "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", -} -_rvocab = { - "https://w3id.org/cwl/salad#Any": "Any", - "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", - "https://w3id.org/cwl/cwl#CWLType": "CWLType", - "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", - "https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", - "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", - "https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", - 
"https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", - "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", - "https://w3id.org/cwl/cwl#CommandInputSchema": "CommandInputSchema", - "https://w3id.org/cwl/cwl#CommandLineBindable": "CommandLineBindable", - "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", - "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", - "https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", - "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", - "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", - "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", - "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", - "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", - "https://w3id.org/cwl/cwl#Directory": "Directory", - "https://w3id.org/cwl/cwl#Dirent": "Dirent", - "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", - "https://w3id.org/cwl/salad#Documented": "Documented", - "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", - "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", - "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", - "https://w3id.org/cwl/cwl#Expression": "Expression", - "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", - "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", - "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", - "https://w3id.org/cwl/cwl#FieldBase": "FieldBase", - "https://w3id.org/cwl/cwl#File": "File", - "https://w3id.org/cwl/cwl#IOSchema": "IOSchema", - "https://w3id.org/cwl/cwl#Identified": "Identified", - "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", - "https://w3id.org/cwl/cwl#InlineJavascriptRequirement": 
"InlineJavascriptRequirement", - "https://w3id.org/cwl/cwl#InplaceUpdateRequirement": "InplaceUpdateRequirement", - "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", - "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", - "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", - "https://w3id.org/cwl/cwl#InputFormat": "InputFormat", - "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", - "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", - "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", - "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", - "https://w3id.org/cwl/cwl#Labeled": "Labeled", - "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", - "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", - "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", - "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", - "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", - "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", - "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", - "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", - "https://w3id.org/cwl/cwl#OutputFormat": "OutputFormat", - "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", - "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", - "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", - "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", - "https://w3id.org/cwl/cwl#Parameter": "Parameter", - "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", - "https://w3id.org/cwl/cwl#Process": "Process", - "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", - "https://w3id.org/cwl/salad#RecordField": "RecordField", - "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", - "https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", - 
"https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", - "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", - "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", - "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", - "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", - "https://w3id.org/cwl/cwl#Sink": "Sink", - "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", - "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", - "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", - "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", - "https://w3id.org/cwl/cwl#ToolTimeLimit": "ToolTimeLimit", - "https://w3id.org/cwl/cwl#WorkReuse": "WorkReuse", - "https://w3id.org/cwl/cwl#Workflow": "Workflow", - "https://w3id.org/cwl/cwl#WorkflowInputParameter": "WorkflowInputParameter", - "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", - "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", - "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", - "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", - "https://w3id.org/cwl/salad#array": "array", - "http://www.w3.org/2001/XMLSchema#boolean": "boolean", - "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", - "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", - "http://www.w3.org/2001/XMLSchema#double": "double", - "https://w3id.org/cwl/cwl#draft-2": "draft-2", - "https://w3id.org/cwl/cwl#draft-3": "draft-3", - "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", - "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", - "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", - "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", - "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", - 
"https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", - "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", - "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", - "https://w3id.org/cwl/salad#enum": "enum", - "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", - "http://www.w3.org/2001/XMLSchema#float": "float", - "http://www.w3.org/2001/XMLSchema#int": "int", - "http://www.w3.org/2001/XMLSchema#long": "long", - "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", - "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", - "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", - "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing": "no_listing", - "https://w3id.org/cwl/salad#null": "null", - "https://w3id.org/cwl/salad#record": "record", - "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing": "shallow_listing", - "https://w3id.org/cwl/cwl#stderr": "stderr", - "https://w3id.org/cwl/cwl#stdin": "stdin", - "https://w3id.org/cwl/cwl#stdout": "stdout", - "http://www.w3.org/2001/XMLSchema#string": "string", - "https://w3id.org/cwl/cwl#v1.0": "v1.0", - "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", - "https://w3id.org/cwl/cwl#v1.1": "v1.1", - "https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", -} - -strtype = _PrimitiveLoader(str) -inttype = _PrimitiveLoader(int) -floattype = _PrimitiveLoader(float) -booltype = _PrimitiveLoader(bool) -None_type = _PrimitiveLoader(type(None)) -Any_type = _AnyLoader() -PrimitiveTypeLoader = _EnumLoader( - ( - "null", - "boolean", - "int", - "long", - "float", - "double", - "string", - ), - "PrimitiveType", -) -""" -Salad data types are based on Avro schema declarations. Refer to the -[Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for -detailed information. 
- -null: no value -boolean: a binary value -int: 32-bit signed integer -long: 64-bit signed integer -float: single precision (32-bit) IEEE 754 floating-point number -double: double precision (64-bit) IEEE 754 floating-point number -string: Unicode character sequence -""" -AnyLoader = _EnumLoader(("Any",), "Any") -""" -The **Any** type validates for any non-null value. -""" -RecordFieldLoader = _RecordLoader(RecordField) -RecordSchemaLoader = _RecordLoader(RecordSchema) -EnumSchemaLoader = _RecordLoader(EnumSchema) -ArraySchemaLoader = _RecordLoader(ArraySchema) -CWLVersionLoader = _EnumLoader( - ( - "draft-2", - "draft-3.dev1", - "draft-3.dev2", - "draft-3.dev3", - "draft-3.dev4", - "draft-3.dev5", - "draft-3", - "draft-4.dev1", - "draft-4.dev2", - "draft-4.dev3", - "v1.0.dev4", - "v1.0", - "v1.1.0-dev1", - "v1.1", - ), - "CWLVersion", -) -""" -Version symbols for published CWL document versions. -""" -CWLTypeLoader = _EnumLoader( - ( - "null", - "boolean", - "int", - "long", - "float", - "double", - "string", - "File", - "Directory", - ), - "CWLType", -) -""" -Extends primitive types with the concept of a file and directory as a builtin type. -File: A File object -Directory: A Directory object -""" -FileLoader = _RecordLoader(File) -DirectoryLoader = _RecordLoader(Directory) -LoadListingEnumLoader = _EnumLoader( - ( - "no_listing", - "shallow_listing", - "deep_listing", - ), - "LoadListingEnum", -) -""" -Specify the desired behavior for loading the `listing` field of -a Directory object for use by expressions. - -no_listing: Do not load the directory listing. -shallow_listing: Only load the top level listing, do not recurse into subdirectories. -deep_listing: Load the directory listing and recursively load all subdirectories as well. 
-""" -ExpressionLoader = _ExpressionLoader(str) -InputBindingLoader = _RecordLoader(InputBinding) -InputRecordFieldLoader = _RecordLoader(InputRecordField) -InputRecordSchemaLoader = _RecordLoader(InputRecordSchema) -InputEnumSchemaLoader = _RecordLoader(InputEnumSchema) -InputArraySchemaLoader = _RecordLoader(InputArraySchema) -OutputRecordFieldLoader = _RecordLoader(OutputRecordField) -OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema) -OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema) -OutputArraySchemaLoader = _RecordLoader(OutputArraySchema) -InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement) -SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement) -SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema) -LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement) -EnvironmentDefLoader = _RecordLoader(EnvironmentDef) -CommandLineBindingLoader = _RecordLoader(CommandLineBinding) -CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding) -CommandLineBindableLoader = _RecordLoader(CommandLineBindable) -CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField) -CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema) -CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema) -CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema) -CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField) -CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema) -CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema) -CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema) -CommandInputParameterLoader = _RecordLoader(CommandInputParameter) -CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) -stdinLoader = _EnumLoader(("stdin",), "stdin") -""" -Only valid as a `type` for a `CommandLineTool` input with no -`inputBinding` set. 
`stdin` must not be specified at the `CommandLineTool` -level. - -The following -``` -inputs: - an_input_name: - type: stdin -``` -is equivalent to -``` -inputs: - an_input_name: - type: File - streamable: true - -stdin: ${inputs.an_input_name.path} -``` -""" -stdoutLoader = _EnumLoader(("stdout",), "stdout") -""" -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. - -The following -``` -outputs: - an_output_name: - type: stdout - -stdout: a_stdout_file -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stdout_file - -stdout: a_stdout_file -``` - -If there is no `stdout` name provided, a random filename will be created. -For example, the following -``` -outputs: - an_output_name: - type: stdout -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stdout_filenameABCDEFG - -stdout: random_stdout_filenameABCDEFG -``` -""" -stderrLoader = _EnumLoader(("stderr",), "stderr") -""" -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. - -The following -``` -outputs: - an_output_name: - type: stderr - -stderr: a_stderr_file -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stderr_file - -stderr: a_stderr_file -``` - -If there is no `stderr` name provided, a random filename will be created. 
-For example, the following -``` -outputs: - an_output_name: - type: stderr -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stderr_filenameABCDEFG - -stderr: random_stderr_filenameABCDEFG -``` -""" -CommandLineToolLoader = _RecordLoader(CommandLineTool) -DockerRequirementLoader = _RecordLoader(DockerRequirement) -SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) -SoftwarePackageLoader = _RecordLoader(SoftwarePackage) -DirentLoader = _RecordLoader(Dirent) -InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement) -EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement) -ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement) -ResourceRequirementLoader = _RecordLoader(ResourceRequirement) -WorkReuseLoader = _RecordLoader(WorkReuse) -NetworkAccessLoader = _RecordLoader(NetworkAccess) -InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement) -ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit) -ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter) -WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter) -ExpressionToolLoader = _RecordLoader(ExpressionTool) -LinkMergeMethodLoader = _EnumLoader( - ( - "merge_nested", - "merge_flattened", - ), - "LinkMergeMethod", -) -""" -The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). -""" -WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) -WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) -WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) -ScatterMethodLoader = _EnumLoader( - ( - "dotproduct", - "nested_crossproduct", - "flat_crossproduct", - ), - "ScatterMethod", -) -""" -The scatter method, as described in [workflow step scatter](#WorkflowStep). 
-""" -WorkflowStepLoader = _RecordLoader(WorkflowStep) -WorkflowLoader = _RecordLoader(Workflow) -SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) -ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement) -MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement) -StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement) -array_of_strtype = _ArrayLoader(strtype) -union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ) -) -uri_strtype_True_False_None = _URILoader(strtype, True, False, None) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( - ( - PrimitiveTypeLoader, - RecordSchemaLoader, - EnumSchemaLoader, - ArraySchemaLoader, - strtype, - ) -) -array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype -) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( - ( - PrimitiveTypeLoader, - RecordSchemaLoader, - EnumSchemaLoader, - ArraySchemaLoader, - strtype, - array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - 
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, - 2, -) -array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) -union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_RecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" -) -enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader( - ("record",), "enum_d9cba076fca539106791a4f46d198c7fcfbdb779" -) -typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = _TypeDSLLoader( - enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2 -) -uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) -enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader( - ("enum",), "enum_d961d79c225752b9fadb617367615ab176b47d77" -) -typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader( - enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2 -) -enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader( - ("array",), "enum_d062602be0b4b8fd33e69e29a841317b6ab665bc" -) -typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader( - enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2 -) -File_classLoader = _EnumLoader(("File",), "File_class") -uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None) -union_of_None_type_or_strtype = _UnionLoader( - ( - None_type, - strtype, - ) -) -uri_union_of_None_type_or_strtype_False_False_None = _URILoader( - union_of_None_type_or_strtype, False, False, None -) -union_of_None_type_or_inttype = _UnionLoader( - ( - None_type, - inttype, - ) -) -union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - FileLoader, - DirectoryLoader, - ) -) 
-array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( - union_of_FileLoader_or_DirectoryLoader -) -union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - None_type, - array_of_union_of_FileLoader_or_DirectoryLoader, - ) -) -secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader -) -uri_union_of_None_type_or_strtype_True_False_None = _URILoader( - union_of_None_type_or_strtype, True, False, None -) -Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader( - Directory_classLoader, False, True, None -) -union_of_None_type_or_booltype = _UnionLoader( - ( - None_type, - booltype, - ) -) -union_of_None_type_or_LoadListingEnumLoader = _UnionLoader( - ( - None_type, - LoadListingEnumLoader, - ) -) -array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) -union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( - ( - None_type, - SecondaryFileSchemaLoader, - array_of_SecondaryFileSchemaLoader, - ) -) -secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( - union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader -) -union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ExpressionLoader, - ) -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, -) -union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - ) -) -uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = 
_URILoader( - union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None -) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - 2, -) -array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) -union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_InputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" -) 
-union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - OutputRecordSchemaLoader, - OutputEnumSchemaLoader, - OutputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - OutputRecordSchemaLoader, - OutputEnumSchemaLoader, - OutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - 2, -) -array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) -union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_OutputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" -) -union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( - ( - 
None_type, - FileLoader, - DirectoryLoader, - Any_type, - ) -) -union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _UnionLoader( - ( - CommandInputParameterLoader, - WorkflowInputParameterLoader, - ) -) -array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( - _ArrayLoader(union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader) -) -idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _IdMapLoader( - array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, - "id", - "type", -) -union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _UnionLoader( - ( - CommandOutputParameterLoader, - ExpressionToolOutputParameterLoader, - WorkflowOutputParameterLoader, - ) -) -array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _ArrayLoader( - union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader -) -idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _IdMapLoader( - array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader, - "id", - "type", -) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( - ( - InlineJavascriptRequirementLoader, - 
SchemaDefRequirementLoader, - LoadListingRequirementLoader, - DockerRequirementLoader, - SoftwareRequirementLoader, - InitialWorkDirRequirementLoader, - EnvVarRequirementLoader, - ShellCommandRequirementLoader, - ResourceRequirementLoader, - WorkReuseLoader, - NetworkAccessLoader, - InplaceUpdateRequirementLoader, - ToolTimeLimitLoader, - SubworkflowFeatureRequirementLoader, - ScatterFeatureRequirementLoader, - MultipleInputFeatureRequirementLoader, - StepInputExpressionRequirementLoader, - ) -) -array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader -) 
-union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( - ( - None_type, - array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - ) -) -idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader( - 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - "class", - "None", -) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( - ( - InlineJavascriptRequirementLoader, - SchemaDefRequirementLoader, - LoadListingRequirementLoader, - DockerRequirementLoader, - SoftwareRequirementLoader, - InitialWorkDirRequirementLoader, - EnvVarRequirementLoader, - ShellCommandRequirementLoader, - ResourceRequirementLoader, - WorkReuseLoader, - NetworkAccessLoader, - InplaceUpdateRequirementLoader, - ToolTimeLimitLoader, - SubworkflowFeatureRequirementLoader, - ScatterFeatureRequirementLoader, - MultipleInputFeatureRequirementLoader, - StepInputExpressionRequirementLoader, - Any_type, - ) -) 
-array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type -) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( - ( - None_type, - 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - ) -) -idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( - union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - "class", - "None", -) -union_of_None_type_or_CWLVersionLoader = _UnionLoader( - ( - None_type, - CWLVersionLoader, - ) -) -uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader( - 
union_of_None_type_or_CWLVersionLoader, False, True, None -) -InlineJavascriptRequirement_classLoader = _EnumLoader( - ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" -) -uri_InlineJavascriptRequirement_classLoader_False_True_None = _URILoader( - InlineJavascriptRequirement_classLoader, False, True, None -) -union_of_None_type_or_array_of_strtype = _UnionLoader( - ( - None_type, - array_of_strtype, - ) -) -SchemaDefRequirement_classLoader = _EnumLoader( - ("SchemaDefRequirement",), "SchemaDefRequirement_class" -) -uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( - SchemaDefRequirement_classLoader, False, True, None -) -union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( - ( - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - ) -) -array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( - union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader -) -union_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - strtype, - ExpressionLoader, - ) -) -union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - booltype, - ExpressionLoader, - ) -) -LoadListingRequirement_classLoader = _EnumLoader( - ("LoadListingRequirement",), "LoadListingRequirement_class" -) -uri_LoadListingRequirement_classLoader_False_True_None = _URILoader( - LoadListingRequirement_classLoader, False, True, None -) -union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - inttype, - ExpressionLoader, - ) -) -union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - array_of_strtype, - ) -) -union_of_None_type_or_ExpressionLoader = _UnionLoader( - ( - None_type, - ExpressionLoader, - ) -) 
-union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - CommandLineBindingLoader, - ) -) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - 2, -) -array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) 
-union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_CommandInputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" - ) -) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - 
union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - 2, -) -union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( - ( - None_type, - CommandOutputBindingLoader, - ) -) -array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) -union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_CommandOutputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" - ) -) -union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - stdinLoader, - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - 
union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - 2, -) -union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - stdoutLoader, - stderrLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - 2, -) -CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") -uri_CommandLineTool_classLoader_False_True_None = _URILoader( - CommandLineTool_classLoader, False, True, None -) -array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) -idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( - 
array_of_CommandInputParameterLoader, "id", "type" -) -array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) -idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( - array_of_CommandOutputParameterLoader, "id", "type" -) -union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - strtype, - ExpressionLoader, - CommandLineBindingLoader, - ) -) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) -) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - ) -) -array_of_inttype = _ArrayLoader(inttype) -union_of_None_type_or_array_of_inttype = _UnionLoader( - ( - None_type, - array_of_inttype, - ) -) -DockerRequirement_classLoader = _EnumLoader( - ("DockerRequirement",), "DockerRequirement_class" -) -uri_DockerRequirement_classLoader_False_True_None = _URILoader( - DockerRequirement_classLoader, False, True, None -) -SoftwareRequirement_classLoader = _EnumLoader( - ("SoftwareRequirement",), "SoftwareRequirement_class" -) -uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( - SoftwareRequirement_classLoader, False, True, None -) -array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) -idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( - array_of_SoftwarePackageLoader, "package", "specs" -) -uri_union_of_None_type_or_array_of_strtype_False_False_None = _URILoader( - union_of_None_type_or_array_of_strtype, False, False, None -) -InitialWorkDirRequirement_classLoader = _EnumLoader( - ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" -) -uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( - InitialWorkDirRequirement_classLoader, False, True, None -) 
-union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader = _UnionLoader( - ( - None_type, - FileLoader, - array_of_union_of_FileLoader_or_DirectoryLoader, - DirectoryLoader, - DirentLoader, - ExpressionLoader, - ) -) -array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader = _ArrayLoader( - union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader -) -union_of_array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader_or_ExpressionLoader = _UnionLoader( - ( - array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader, - ExpressionLoader, - ) -) -EnvVarRequirement_classLoader = _EnumLoader( - ("EnvVarRequirement",), "EnvVarRequirement_class" -) -uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( - EnvVarRequirement_classLoader, False, True, None -) -array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) -idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( - array_of_EnvironmentDefLoader, "envName", "envValue" -) -ShellCommandRequirement_classLoader = _EnumLoader( - ("ShellCommandRequirement",), "ShellCommandRequirement_class" -) -uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( - ShellCommandRequirement_classLoader, False, True, None -) -ResourceRequirement_classLoader = _EnumLoader( - ("ResourceRequirement",), "ResourceRequirement_class" -) -uri_ResourceRequirement_classLoader_False_True_None = _URILoader( - ResourceRequirement_classLoader, False, True, None -) -WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") -uri_WorkReuse_classLoader_False_True_None = _URILoader( 
- WorkReuse_classLoader, False, True, None -) -union_of_booltype_or_ExpressionLoader = _UnionLoader( - ( - booltype, - ExpressionLoader, - ) -) -NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") -uri_NetworkAccess_classLoader_False_True_None = _URILoader( - NetworkAccess_classLoader, False, True, None -) -InplaceUpdateRequirement_classLoader = _EnumLoader( - ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" -) -uri_InplaceUpdateRequirement_classLoader_False_True_None = _URILoader( - InplaceUpdateRequirement_classLoader, False, True, None -) -ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") -uri_ToolTimeLimit_classLoader_False_True_None = _URILoader( - ToolTimeLimit_classLoader, False, True, None -) -union_of_inttype_or_ExpressionLoader = _UnionLoader( - ( - inttype, - ExpressionLoader, - ) -) -union_of_None_type_or_InputBindingLoader = _UnionLoader( - ( - None_type, - InputBindingLoader, - ) -) -ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") -uri_ExpressionTool_classLoader_False_True_None = _URILoader( - ExpressionTool_classLoader, False, True, None -) -array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) -idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( - array_of_WorkflowInputParameterLoader, "id", "type" -) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( - ExpressionToolOutputParameterLoader -) -idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( - array_of_ExpressionToolOutputParameterLoader, "id", "type" -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1 -) -union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( - ( - None_type, - LinkMergeMethodLoader, - ) -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader( - 
union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2 -) -array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) -idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( - array_of_WorkflowStepInputLoader, "id", "source" -) -union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( - ( - strtype, - WorkflowStepOutputLoader, - ) -) -array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( - union_of_strtype_or_WorkflowStepOutputLoader -) -union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( - (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) -) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( - _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, - ) -) -array_of_Any_type = _ArrayLoader(Any_type) -union_of_None_type_or_array_of_Any_type = _UnionLoader( - ( - None_type, - array_of_Any_type, - ) -) -idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( - union_of_None_type_or_array_of_Any_type, "class", "None" -) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ) - ) -) -uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None = _URILoader( - union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - False, - False, - None, -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0 -) -union_of_None_type_or_ScatterMethodLoader = _UnionLoader( - ( - None_type, - ScatterMethodLoader, - ) -) -uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader( - union_of_None_type_or_ScatterMethodLoader, False, True, None -) -Workflow_classLoader = _EnumLoader(("Workflow",), 
"Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader( - Workflow_classLoader, False, True, None -) -array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) -idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( - array_of_WorkflowOutputParameterLoader, "id", "type" -) -array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) -union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) -idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( - union_of_array_of_WorkflowStepLoader, "id", "None" -) -SubworkflowFeatureRequirement_classLoader = _EnumLoader( - ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" -) -uri_SubworkflowFeatureRequirement_classLoader_False_True_None = _URILoader( - SubworkflowFeatureRequirement_classLoader, False, True, None -) -ScatterFeatureRequirement_classLoader = _EnumLoader( - ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" -) -uri_ScatterFeatureRequirement_classLoader_False_True_None = _URILoader( - ScatterFeatureRequirement_classLoader, False, True, None -) -MultipleInputFeatureRequirement_classLoader = _EnumLoader( - ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" -) -uri_MultipleInputFeatureRequirement_classLoader_False_True_None = _URILoader( - MultipleInputFeatureRequirement_classLoader, False, True, None -) -StepInputExpressionRequirement_classLoader = _EnumLoader( - ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" -) -uri_StepInputExpressionRequirement_classLoader_False_True_None = _URILoader( - StepInputExpressionRequirement_classLoader, False, True, None -) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( - ( - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ) -) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = ( - _ArrayLoader( - 
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader - ) -) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader = _UnionLoader( - ( - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - ) -) - - -def load_document( - doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - if baseuri is None: - baseuri = file_uri(os.getcwd()) + "/" - if loadingOptions is None: - loadingOptions = LoadingOptions() - result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - doc, - baseuri, - loadingOptions, - ) - return result - - -def load_document_with_metadata( - doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Any: - if baseuri is None: - baseuri = file_uri(os.getcwd()) + "/" - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=baseuri) - return _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - doc, - baseuri, - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - -def load_document_by_string( - string: Any, - uri: str, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - yaml = yaml_no_ts() - result = yaml.load(string) - add_lc_filename(result, uri) - - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=uri) - - result, metadata = _document_load( - 
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - result, - uri, - loadingOptions, - ) - return result - - -def load_document_by_yaml( - yaml: Any, - uri: str, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - """ - Shortcut to load via a YAML object. - yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True - """ - add_lc_filename(yaml, uri) - - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=uri) - - result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, - yaml, - uri, - loadingOptions, - ) - return result diff --git a/schema_salad/tests/cwl_v1_2.py b/schema_salad/tests/cwl_v1_2.py deleted file mode 100644 index e53ecd07c..000000000 --- a/schema_salad/tests/cwl_v1_2.py +++ /dev/null @@ -1,26303 +0,0 @@ -# -# This file was autogenerated using schema-salad-tool --codegen=python -# The code itself is released under the Apache 2.0 license and the help text is -# subject to the license of the original schema. 
-import copy -import logging -import os -import pathlib -import re -import tempfile -import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 -import xml.sax # nosec -from abc import ABC, abstractmethod -from io import StringIO -from typing import ( - Any, - Dict, - List, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, - no_type_check, -) -from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit -from urllib.request import pathname2url - -from rdflib import Graph -from rdflib.plugins.parsers.notation3 import BadSyntax -from ruamel.yaml.comments import CommentedMap, CommentedSeq - -from schema_salad.exceptions import SchemaSaladException, ValidationException -from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher -from schema_salad.sourceline import SourceLine, add_lc_filename -from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ - -_vocab: Dict[str, str] = {} -_rvocab: Dict[str, str] = {} - -_logger = logging.getLogger("salad") - - -IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] - -doc_line_info = CommentedMap() - - -class LoadingOptions: - idx: IdxType - fileuri: Optional[str] - baseuri: str - namespaces: MutableMapping[str, str] - schemas: MutableSequence[str] - original_doc: Optional[Any] - addl_metadata: MutableMapping[str, Any] - fetcher: Fetcher - vocab: Dict[str, str] - rvocab: Dict[str, str] - cache: CacheType - imports: List[str] - includes: List[str] - - def __init__( - self, - fetcher: Optional[Fetcher] = None, - namespaces: Optional[Dict[str, str]] = None, - schemas: Optional[List[str]] = None, - fileuri: Optional[str] = None, - copyfrom: Optional["LoadingOptions"] = None, - original_doc: Optional[Any] = None, - addl_metadata: Optional[Dict[str, str]] = None, - baseuri: Optional[str] = None, - idx: Optional[IdxType] = None, - imports: Optional[List[str]] = None, - includes: Optional[List[str]] = None, - ) -> None: 
- """Create a LoadingOptions object.""" - self.original_doc = original_doc - - if idx is not None: - self.idx = idx - else: - self.idx = copyfrom.idx if copyfrom is not None else {} - - if fileuri is not None: - self.fileuri = fileuri - else: - self.fileuri = copyfrom.fileuri if copyfrom is not None else None - - if baseuri is not None: - self.baseuri = baseuri - else: - self.baseuri = copyfrom.baseuri if copyfrom is not None else "" - - if namespaces is not None: - self.namespaces = namespaces - else: - self.namespaces = copyfrom.namespaces if copyfrom is not None else {} - - if schemas is not None: - self.schemas = schemas - else: - self.schemas = copyfrom.schemas if copyfrom is not None else [] - - if addl_metadata is not None: - self.addl_metadata = addl_metadata - else: - self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} - - if imports is not None: - self.imports = imports - else: - self.imports = copyfrom.imports if copyfrom is not None else [] - - if includes is not None: - self.includes = includes - else: - self.includes = copyfrom.includes if copyfrom is not None else [] - - if fetcher is not None: - self.fetcher = fetcher - elif copyfrom is not None: - self.fetcher = copyfrom.fetcher - else: - import requests - from cachecontrol.caches import FileCache - from cachecontrol.wrapper import CacheControl - - root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) - session = CacheControl( - requests.Session(), - cache=FileCache(root / ".cache" / "salad"), - ) - self.fetcher: Fetcher = DefaultFetcher({}, session) - - self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - - self.vocab = _vocab - self.rvocab = _rvocab - - if namespaces is not None: - self.vocab = self.vocab.copy() - self.rvocab = self.rvocab.copy() - for k, v in namespaces.items(): - self.vocab[k] = v - self.rvocab[v] = k - - @property - def graph(self) -> Graph: - """Generate a merged rdflib.Graph from all entries in 
self.schemas.""" - graph = Graph() - if not self.schemas: - return graph - key = str(hash(tuple(self.schemas))) - if key in self.cache: - return cast(Graph, self.cache[key]) - for schema in self.schemas: - fetchurl = ( - self.fetcher.urljoin(self.fileuri, schema) - if self.fileuri is not None - else pathlib.Path(schema).resolve().as_uri() - ) - if fetchurl not in self.cache or self.cache[fetchurl] is True: - _logger.debug("Getting external schema %s", fetchurl) - try: - content = self.fetcher.fetch_text(fetchurl) - except Exception as e: - _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) - continue - newGraph = Graph() - err_msg = "unknown error" - for fmt in ["xml", "turtle"]: - try: - newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) - self.cache[fetchurl] = newGraph - graph += newGraph - break - except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: - err_msg = str(e) - else: - _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) - self.cache[key] = graph - return graph - - -class Saveable(ABC): - """Mark classes than have a save() and fromDoc() function.""" - - @classmethod - @abstractmethod - def fromDoc( - cls, - _doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Saveable": - """Construct this object from the result of yaml.load().""" - - @abstractmethod - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, - ) -> CommentedMap: - """Convert this object to a JSON/YAML friendly dictionary.""" - - -def load_field(val, fieldtype, baseuri, loadingOptions): - # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any - if isinstance(val, MutableMapping): - if "$import" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - 
url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) - result, metadata = _document_load_by_url( - fieldtype, - url, - loadingOptions, - ) - loadingOptions.imports.append(url) - return result - if "$include" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) - val = loadingOptions.fetcher.fetch_text(url) - loadingOptions.includes.append(url) - return fieldtype.load(val, baseuri, loadingOptions) - - -save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] - - -def add_kv( - old_doc: CommentedMap, - new_doc: CommentedMap, - line_numbers: Dict[Any, Dict[str, int]], - key: str, - val: Any, - max_len: int, - cols: Dict[int, int], - min_col: int = 0, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, -) -> Tuple[int, Optional[Dict[int, int]]]: - """Add key value pair into Commented Map. - - Function to add key value pair into new CommentedMap given old CommentedMap, line_numbers - for each key/val pair in the old CommentedMap,key/val pair to insert, max_line of the old CommentedMap, - and max col value taken for each line. 
- """ - if inserted_line_info is None: - inserted_line_info = {} - - if len(inserted_line_info.keys()) >= 1: - max_line = max(inserted_line_info.keys()) + 1 - else: - max_line = 0 - - if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key - line_info = old_doc.lc.data[key] # Get the line information for the key - if ( - line_info[0] + shift not in inserted_line_info - ): # If the line of the key + shift isn't taken, add it - new_doc.lc.add_kv_line_col( - key, - [ - old_doc.lc.data[key][0] + shift, - old_doc.lc.data[key][1], - old_doc.lc.data[key][2] + shift, - old_doc.lc.data[key][3], - ], - ) - inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] - else: # If the line is already taken - line = line_info[0] + shift - while line in inserted_line_info.keys(): # Find the closest free line - line += 1 - new_doc.lc.add_kv_line_col( - key, - [ - line, - old_doc.lc.data[key][1], - line + (line - old_doc.lc.data[key][2]), - old_doc.lc.data[key][3], - ], - ) - inserted_line_info[line] = old_doc.lc.data[key][1] - return max_len, inserted_line_info - elif isinstance(val, (int, float, str)) and not isinstance( - val, bool - ): # If the value is hashable - if val in line_numbers: # If the value is in the original CommentedMap - line = line_numbers[val]["line"] + shift # Get the line info for the value - if line in inserted_line_info: # Get the appropriate line to place value on - line = max_line - - col = line_numbers[val]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - inserted_line_info[line] = col + len(key) + 2 - return max_len, inserted_line_info - elif isinstance(val, str): # Logic for DSL expansition with "?" - if val + "?" 
in line_numbers: - line = line_numbers[val + "?"]["line"] + shift - if line in inserted_line_info: - line = max_line - col = line_numbers[val + "?"]["col"] - new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) - inserted_line_info[line] = col + len(key) + 2 - return max_len, inserted_line_info - elif old_doc: - if val in old_doc: - index = old_doc.lc.data.index(val) - line_info = old_doc.lc.data[index] - if line_info[0] + shift not in inserted_line_info: - new_doc.lc.add_kv_line_col( - key, - [ - old_doc.lc.data[index][0] + shift, - old_doc.lc.data[index][1], - old_doc.lc.data[index][2] + shift, - old_doc.lc.data[index][3], - ], - ) - inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ - 1 - ] - else: - new_doc.lc.add_kv_line_col( - key, - [ - max_line + shift, - old_doc.lc.data[index][1], - max_line + (max_line - old_doc.lc.data[index][2]) + shift, - old_doc.lc.data[index][3], - ], - ) - inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] - # If neither the key or value is in the original CommentedMap/old doc (or value is not hashable) - new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) - inserted_line_info[max_line] = min_col + len(key) + 2 - return max_len + 1, inserted_line_info - - -@no_type_check -def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: - """Take a list of keys/indexes and iterates through the global CommentedMap.""" - doc = doc_line_info - for key in keys: - if isinstance(doc, CommentedMap): - doc = doc.get(key) - elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): - if key < len(doc): - doc = doc[key] - else: - return None - else: - return None - if isinstance(doc, CommentedSeq): - to_return = CommentedMap() - for index, key in enumerate(doc): - to_return[key] = "" - to_return.lc.add_kv_line_col( - key, - [ - doc.lc.data[index][0], - doc.lc.data[index][1], - doc.lc.data[index][0], - doc.lc.data[index][1], - ], - ) - 
return to_return - return doc - - -def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: - """Get line numbers for kv pairs in CommentedMap. - - For each key/value pair in a CommentedMap, save the line/col info into a dictionary, - only save value info if value is hashable. - """ - line_numbers: Dict[Any, Dict[str, int]] = {} - if doc is None: - return {} - if doc.lc.data is None: - return {} - for key, value in doc.lc.data.items(): - line_numbers[key] = {} - - line_numbers[key]["line"] = doc.lc.data[key][0] - line_numbers[key]["col"] = doc.lc.data[key][1] - if isinstance(value, (int, float, bool, str)): - line_numbers[value] = {} - line_numbers[value]["line"] = doc.lc.data[key][2] - line_numbers[value]["col"] = doc.lc.data[key][3] - return line_numbers - - -def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: - """Given a array of line column information, get the minimum column.""" - min_col = 0 - for line in line_numbers: - if line_numbers[line]["col"] > min_col: - min_col = line_numbers[line]["col"] - return min_col - - -def get_max_line_num(doc: CommentedMap) -> int: - """Get the max line number for a CommentedMap. - - Iterate through the the key with the highest line number until you reach a non-CommentedMap value - or empty CommentedMap. - """ - max_line = 0 - max_key = "" - cur = doc - while isinstance(cur, CommentedMap) and len(cur) > 0: - for key in cur.lc.data.keys(): - if cur.lc.data[key][2] >= max_line: - max_line = cur.lc.data[key][2] - max_key = key - cur = cur[max_key] - return max_line + 1 - - -def save( - val: Any, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0, -) -> save_type: - """Save a val of any type. - - Recursively calls save method from class if val is of type Saveable. - Otherwise, saves val to CommentedMap or CommentedSeq. 
- """ - if keys is None: - keys = [] - - doc = iterate_through_doc(keys) - - if isinstance(val, Saveable): - return val.save( - top=top, - base_url=base_url, - relative_uris=relative_uris, - keys=keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if isinstance(val, MutableSequence): - r = CommentedSeq() - r.lc.data = {} - for i in range(0, len(val)): - new_keys = keys - if doc: - if str(i) in doc: - r.lc.data[i] = doc.lc.data[i] - new_keys.append(i) - r.append( - save( - val[i], - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=new_keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - ) - return r - - if isinstance(val, MutableMapping): - newdict = CommentedMap() - new_keys = keys - for key in val: - - if doc: - if key in doc: - newdict.lc.add_kv_line_col(key, doc.lc.data[key]) - new_keys.append(key) - - newdict[key] = save( - val[key], - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=new_keys, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - return newdict - if val is None or isinstance(val, (int, float, bool, str)): - return val - raise Exception("Not Saveable: %s" % type(val)) - - -def save_with_metadata( - val: Any, - valLoadingOpts: LoadingOptions, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, -) -> save_type: - """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" - saved_val = save(val, top, base_url, relative_uris) - newdict: MutableMapping[str, Any] = {} - if isinstance(saved_val, MutableSequence): - newdict = {"$graph": saved_val} - elif isinstance(saved_val, MutableMapping): - newdict = saved_val - - if valLoadingOpts.namespaces: - newdict["$namespaces"] = valLoadingOpts.namespaces - if valLoadingOpts.schemas: - newdict["$schemas"] = valLoadingOpts.schemas - if valLoadingOpts.baseuri: - newdict["$base"] = valLoadingOpts.baseuri - for k, v in valLoadingOpts.addl_metadata.items(): - if k not in 
newdict: - newdict[k] = v - - return newdict - - -def expand_url( - url, # type: str - base_url, # type: str - loadingOptions, # type: LoadingOptions - scoped_id=False, # type: bool - vocab_term=False, # type: bool - scoped_ref=None, # type: Optional[int] -): - # type: (...) -> str - if url in ("@id", "@type"): - return url - - if vocab_term and url in loadingOptions.vocab: - return url - - if bool(loadingOptions.vocab) and ":" in url: - prefix = url.split(":")[0] - if prefix in loadingOptions.vocab: - url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] - - split = urlsplit(url) - - if ( - (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) - or url.startswith("$(") - or url.startswith("${") - ): - pass - elif scoped_id and not bool(split.fragment): - splitbase = urlsplit(base_url) - frg = "" - if bool(splitbase.fragment): - frg = splitbase.fragment + "/" + split.path - else: - frg = split.path - pt = splitbase.path if splitbase.path != "" else "/" - url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) - elif scoped_ref is not None and not bool(split.fragment): - splitbase = urlsplit(base_url) - sp = splitbase.fragment.split("/") - n = scoped_ref - while n > 0 and len(sp) > 0: - sp.pop() - n -= 1 - sp.append(url) - url = urlunsplit( - ( - splitbase.scheme, - splitbase.netloc, - splitbase.path, - splitbase.query, - "/".join(sp), - ) - ) - else: - url = loadingOptions.fetcher.urljoin(base_url, url) - - if vocab_term: - split = urlsplit(url) - if bool(split.scheme): - if url in loadingOptions.rvocab: - return loadingOptions.rvocab[url] - else: - raise ValidationException(f"Term {url!r} not in vocabulary") - - return url - - -class _Loader: - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - pass - - -class _AnyLoader(_Loader): - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, 
Optional[str]) -> Any - if doc is not None: - return doc - raise ValidationException("Expected non-null") - - -class _PrimitiveLoader(_Loader): - def __init__(self, tp): - # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None - self.tp = tp - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, self.tp): - raise ValidationException( - "Expected a {} but got {}".format( - self.tp.__class__.__name__, doc.__class__.__name__ - ) - ) - return doc - - def __repr__(self): # type: () -> str - return str(self.tp) - - -class _ArrayLoader(_Loader): - def __init__(self, items): - # type: (_Loader) -> None - self.items = items - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, MutableSequence): - raise ValidationException(f"Expected a list, was {type(doc)}") - r = [] # type: List[Any] - errors = [] # type: List[SchemaSaladException] - for i in range(0, len(doc)): - try: - lf = load_field(doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions) - if isinstance(lf, MutableSequence): - r.extend(lf) - else: - r.append(lf) - except ValidationException as e: - errors.append(e.with_sourceline(SourceLine(doc, i, str))) - if errors: - raise ValidationException("", None, errors) - return r - - def __repr__(self): # type: () -> str - return f"array<{self.items}>" - - -class _EnumLoader(_Loader): - def __init__(self, symbols: Sequence[str], name: str) -> None: - self.symbols = symbols - self.name = name - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if doc in self.symbols: - return doc - raise ValidationException(f"Expected one of {self.symbols}") - - def __repr__(self): # type: () -> str - return self.name - - -class _SecondaryDSLLoader(_Loader): - def __init__(self, inner): - # type: (_Loader) -> None - 
self.inner = inner - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - r: List[Dict[str, Any]] = [] - if isinstance(doc, MutableSequence): - for d in doc: - if isinstance(d, str): - if d.endswith("?"): - r.append({"pattern": d[:-1], "required": False}) - else: - r.append({"pattern": d}) - elif isinstance(d, dict): - new_dict: Dict[str, Any] = {} - dict_copy = copy.deepcopy(d) - if "pattern" in dict_copy: - new_dict["pattern"] = dict_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {d}" - ) - new_dict["required"] = ( - dict_copy.pop("required") if "required" in dict_copy else None - ) - - if len(dict_copy): - raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - dict_copy - ) - ) - r.append(new_dict) - - else: - raise ValidationException( - "Expected a string or sequence of (strings or mappings)." - ) - elif isinstance(doc, MutableMapping): - new_dict = {} - doc_copy = copy.deepcopy(doc) - if "pattern" in doc_copy: - new_dict["pattern"] = doc_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {doc}" - ) - new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None - - if len(doc_copy): - raise ValidationException( - f"Unallowed values in secondaryFiles specification entry: {doc_copy}" - ) - r.append(new_dict) - - elif isinstance(doc, str): - if doc.endswith("?"): - r.append({"pattern": doc[:-1], "required": False}) - else: - r.append({"pattern": doc}) - else: - raise ValidationException("Expected str or sequence of str") - return self.inner.load(r, baseuri, loadingOptions, docRoot) - - -class _RecordLoader(_Loader): - def __init__(self, classtype): - # type: (Type[Saveable]) -> None - self.classtype = classtype - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, 
LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, MutableMapping): - raise ValidationException(f"Expected a dict, was {type(doc)}") - return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) - - def __repr__(self): # type: () -> str - return str(self.classtype.__name__) - - -class _ExpressionLoader(_Loader): - def __init__(self, items: Type[str]) -> None: - self.items = items - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if not isinstance(doc, str): - raise ValidationException(f"Expected a str, was {type(doc)}") - return doc - - -class _UnionLoader(_Loader): - def __init__(self, alternates: Sequence[_Loader]) -> None: - self.alternates = alternates - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - errors = [] - for t in self.alternates: - try: - return t.load(doc, baseuri, loadingOptions, docRoot=docRoot) - except ValidationException as e: - errors.append(ValidationException(f"tried {t} but", None, [e])) - raise ValidationException("", None, errors, "-") - - def __repr__(self): # type: () -> str - return " | ".join(str(a) for a in self.alternates) - - -class _URILoader(_Loader): - def __init__(self, inner, scoped_id, vocab_term, scoped_ref): - # type: (_Loader, bool, bool, Union[int, None]) -> None - self.inner = inner - self.scoped_id = scoped_id - self.vocab_term = vocab_term - self.scoped_ref = scoped_ref - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableSequence): - newdoc = [] - for i in doc: - if isinstance(i, str): - newdoc.append( - expand_url( - i, - baseuri, - loadingOptions, - self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - ) - else: - newdoc.append(i) - doc = newdoc - elif isinstance(doc, str): - doc = expand_url( - doc, - baseuri, - loadingOptions, - 
self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - return self.inner.load(doc, baseuri, loadingOptions) - - -class _TypeDSLLoader(_Loader): - typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") - - def __init__(self, inner, refScope): - # type: (_Loader, Union[int, None]) -> None - self.inner = inner - self.refScope = refScope - - def resolve( - self, - doc, # type: str - baseuri, # type: str - loadingOptions, # type: LoadingOptions - ): - # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] - m = self.typeDSLregex.match(doc) - if m: - group1 = m.group(1) - assert group1 is not None # nosec - first = expand_url(group1, baseuri, loadingOptions, False, True, self.refScope) - second = third = None - if bool(m.group(2)): - second = {"type": "array", "items": first} - # second = CommentedMap((("type", "array"), - # ("items", first))) - # second.lc.add_kv_line_col("type", lc) - # second.lc.add_kv_line_col("items", lc) - # second.lc.filename = filename - if bool(m.group(3)): - third = ["null", second or first] - # third = CommentedSeq(["null", second or first]) - # third.lc.add_kv_line_col(0, lc) - # third.lc.add_kv_line_col(1, lc) - # third.lc.filename = filename - return third or second or first - return doc - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableSequence): - r = [] # type: List[Any] - for d in doc: - if isinstance(d, str): - resolved = self.resolve(d, baseuri, loadingOptions) - if isinstance(resolved, MutableSequence): - for i in resolved: - if i not in r: - r.append(i) - else: - if resolved not in r: - r.append(resolved) - else: - r.append(d) - doc = r - elif isinstance(doc, str): - doc = self.resolve(doc, baseuri, loadingOptions) - - return self.inner.load(doc, baseuri, loadingOptions) - - -class _IdMapLoader(_Loader): - def __init__(self, inner, mapSubject, mapPredicate): - # type: (_Loader, str, Union[str, None]) -> None 
- self.inner = inner - self.mapSubject = mapSubject - self.mapPredicate = mapPredicate - - def load(self, doc, baseuri, loadingOptions, docRoot=None): - # type: (Any, str, LoadingOptions, Optional[str]) -> Any - if isinstance(doc, MutableMapping): - r = [] # type: List[Any] - for k in sorted(doc.keys()): - val = doc[k] - if isinstance(val, CommentedMap): - v = copy.copy(val) - v.lc.data = val.lc.data - v.lc.filename = val.lc.filename - v[self.mapSubject] = k - r.append(v) - elif isinstance(val, MutableMapping): - v2 = copy.copy(val) - v2[self.mapSubject] = k - r.append(v2) - else: - if self.mapPredicate: - v3 = {self.mapPredicate: val} - v3[self.mapSubject] = k - r.append(v3) - else: - raise ValidationException("No mapPredicate") - doc = r - return self.inner.load(doc, baseuri, loadingOptions) - - -def _document_load( - loader: _Loader, - doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], - baseuri: str, - loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Tuple[Any, LoadingOptions]: - if isinstance(doc, str): - return _document_load_by_url( - loader, - loadingOptions.fetcher.urljoin(baseuri, doc), - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - if isinstance(doc, MutableMapping): - addl_metadata = {} - if addl_metadata_fields is not None: - for mf in addl_metadata_fields: - if mf in doc: - addl_metadata[mf] = doc[mf] - - docuri = baseuri - if "$base" in doc: - baseuri = doc["$base"] - - loadingOptions = LoadingOptions( - copyfrom=loadingOptions, - namespaces=doc.get("$namespaces", None), - schemas=doc.get("$schemas", None), - baseuri=doc.get("$base", None), - addl_metadata=addl_metadata, - ) - - doc = copy.copy(doc) - if "$namespaces" in doc: - doc.pop("$namespaces") - if "$schemas" in doc: - doc.pop("$schemas") - if "$base" in doc: - doc.pop("$base") - - if isinstance(doc, CommentedMap): - global doc_line_info - doc_line_info = doc - - if "$graph" in doc: - 
loadingOptions.idx[baseuri] = ( - loader.load(doc["$graph"], baseuri, loadingOptions), - loadingOptions, - ) - else: - loadingOptions.idx[baseuri] = ( - loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), - loadingOptions, - ) - - if docuri != baseuri: - loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] - - return loadingOptions.idx[baseuri] - if isinstance(doc, MutableSequence): - loadingOptions.idx[baseuri] = ( - loader.load(doc, baseuri, loadingOptions), - loadingOptions, - ) - return loadingOptions.idx[baseuri] - - raise ValidationException( - "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) - ) - - -def _document_load_by_url( - loader: _Loader, - url: str, - loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Tuple[Any, LoadingOptions]: - if url in loadingOptions.idx: - return loadingOptions.idx[url] - - doc_url, frg = urldefrag(url) - - text = loadingOptions.fetcher.fetch_text(doc_url) - textIO = StringIO(text) - textIO.name = str(doc_url) - yaml = yaml_no_ts() - result = yaml.load(textIO) - add_lc_filename(result, doc_url) - - loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) - - _document_load( - loader, - result, - doc_url, - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - return loadingOptions.idx[url] - - -def file_uri(path, split_frag=False): # type: (str, bool) -> str - if path.startswith("file://"): - return path - if split_frag: - pathsp = path.split("#", 2) - frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" - urlpath = pathname2url(str(pathsp[0])) - else: - urlpath = pathname2url(path) - frag = "" - if urlpath.startswith("//"): - return f"file:{urlpath}{frag}" - return f"file://{urlpath}{frag}" - - -def prefix_url(url: str, namespaces: Dict[str, str]) -> str: - """Expand short forms into full URLs using the given namespace dictionary.""" - for k, v in namespaces.items(): - if url.startswith(v): - 
return k + ":" + url[len(v) :] - return url - - -def save_relative_uri( - uri: Any, - base_url: str, - scoped_id: bool, - ref_scope: Optional[int], - relative_uris: bool, -) -> Any: - """Convert any URI to a relative one, obeying the scoping rules.""" - if isinstance(uri, MutableSequence): - return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] - elif isinstance(uri, str): - if not relative_uris or uri == base_url: - return uri - urisplit = urlsplit(uri) - basesplit = urlsplit(base_url) - if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: - if urisplit.path != basesplit.path: - p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) - if urisplit.fragment: - p = p + "#" + urisplit.fragment - return p - - basefrag = basesplit.fragment + "/" - if ref_scope: - sp = basefrag.split("/") - i = 0 - while i < ref_scope: - sp.pop() - i += 1 - basefrag = "/".join(sp) - - if urisplit.fragment.startswith(basefrag): - return urisplit.fragment[len(basefrag) :] - return urisplit.fragment - return uri - else: - return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) - - -def shortname(inputid: str) -> str: - """ - Compute the shortname of a fully qualified identifier. - - See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. - """ - parsed_id = urlparse(inputid) - if parsed_id.fragment: - return parsed_id.fragment.split("/")[-1] - return parsed_id.path.split("/")[-1] - - -def parser_info() -> str: - return "org.w3id.cwl.v1_2" - - -class Documented(Saveable): - pass - - -class RecordField(Documented): - """ - A field of a record. 
- """ - - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash((self.doc, self.name, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "RecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - 
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'RecordField'", None, _errors__) - _constructed = cls( - doc=doc, - name=name, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if 
isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", 
- val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["doc", "name", "type"]) - - -class RecordSchema(Saveable): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordSchema): - return bool(self.fields == other.fields and self.type == other.type) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "RecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' 
field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'RecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If 
the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type"]) - - -class EnumSchema(Saveable): - """ - Define an enumerated type. 
- - """ - - def __init__( - self, - symbols: Any, - type: Any, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.symbols = symbols - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnumSchema): - return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "EnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - 
typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'EnumSchema'", None, _errors__) - _constructed = cls( - name=name, - symbols=symbols, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in 
inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - 
r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "symbols", "type"]) - - -class ArraySchema(Saveable): - def __init__( - self, - items: Any, - type: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ArraySchema): - return bool(self.items == other.items and self.type == other.type) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - items = load_field( - _doc.get("items"), - uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - 
for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ArraySchema'", None, _errors__) - _constructed = cls( - items=items, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) 
== 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.items is not None and "items" not in r: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type"]) - - -class File(Saveable): - """ - Represents a file (or group of files when `secondaryFiles` is provided) that - will be accessible by tools using standard POSIX file system call API such as - open(2) and read(2). - - Files are represented as objects with `class` of `File`. File objects have - a number of properties that provide metadata about the file. - - The `location` property of a File is a IRI that uniquely identifies the - file. Implementations must support the `file://` IRI scheme and may support - other schemes such as `http://` and `https://`. 
The value of `location` may also be a - relative reference, in which case it must be resolved relative to the IRI - of the document it appears in. Alternately to `location`, implementations - must also accept the `path` property on File, which must be a filesystem - path available on the same host as the CWL runner (for inputs) or the - runtime environment of a command line tool execution (for command line tool - outputs). - - If no `location` or `path` is specified, a file object must specify - `contents` with the UTF-8 text content of the file. This is a "file - literal". File literals do not correspond to external resources, but are - created on disk with `contents` with when needed for executing a tool. - Where appropriate, expressions can return file literals to define new files - on a runtime. The maximum size of `contents` is 64 kilobytes. - - The `basename` property defines the filename on disk where the file is - staged. This may differ from the resource name. If not provided, - `basename` must be computed from the last path part of `location` and made - available to expressions. - - The `secondaryFiles` property is a list of File or Directory objects that - must be staged in the same directory as the primary file. It is an error - for file names to be duplicated in `secondaryFiles`. - - The `size` property is the size in bytes of the File. It must be computed - from the resource and made available to expressions. The `checksum` field - contains a cryptographic hash of the file content for use it verifying file - contents. Implementations may, at user option, enable or disable - computation of the `checksum` field for performance or other reasons. - However, the ability to compute output checksums is required to pass the - CWL conformance test suite. - - When executing a CommandLineTool, the files and secondary files may be - staged to an arbitrary directory, but must use the value of `basename` for - the filename. 
The `path` property must be file path in the context of the - tool execution runtime (local to the compute node, or within the executing - container). All computed properties should be available to expressions. - File literals also must be staged and `path` must be set. - - When collecting CommandLineTool outputs, `glob` matching returns file paths - (with the `path` property) and the derived properties. This can all be - modified by `outputEval`. Alternately, if the file `cwl.output.json` is - present in the output, `outputBinding` is ignored. - - File objects in the output must provide either a `location` IRI or a `path` - property in the context of the tool execution runtime (local to the compute - node, or within the executing container). - - When evaluating an ExpressionTool, file objects must be referenced via - `location` (the expression tool does not have access to files on disk so - `path` is meaningless) or as file literals. It is legal to return a file - object with an existing `location` but a different `basename`. The - `loadContents` field of ExpressionTool inputs behaves the same as on - CommandLineTool inputs, however it is not meaningful on the outputs. - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. 
- - """ - - def __init__( - self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - dirname: Optional[Any] = None, - nameroot: Optional[Any] = None, - nameext: Optional[Any] = None, - checksum: Optional[Any] = None, - size: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - format: Optional[Any] = None, - contents: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "File" - self.location = location - self.path = path - self.basename = basename - self.dirname = dirname - self.nameroot = nameroot - self.nameext = nameext - self.checksum = checksum - self.size = size - self.secondaryFiles = secondaryFiles - self.format = format - self.contents = contents - - def __eq__(self, other: Any) -> bool: - if isinstance(other, File): - return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.dirname == other.dirname - and self.nameroot == other.nameroot - and self.nameext == other.nameext - and self.checksum == other.checksum - and self.size == other.size - and self.secondaryFiles == other.secondaryFiles - and self.format == other.format - and self.contents == other.contents - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.location, - self.path, - self.basename, - self.dirname, - self.nameroot, - self.nameext, - self.checksum, - self.size, - self.secondaryFiles, - self.format, - self.contents, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "File": - 
_doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "File": - raise ValidationException("Not a File") - - if "location" in _doc: - try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'location' field is not valid because:", - SourceLine(_doc, "location", str), - [e], - ) - ) - else: - location = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'path' field is not valid because:", - SourceLine(_doc, "path", str), - [e], - ) - ) - else: - path = None - if "basename" in _doc: - try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'basename' field is not valid because:", - SourceLine(_doc, "basename", str), - [e], - ) - ) - else: - basename = None - if "dirname" in _doc: - try: - dirname = load_field( - _doc.get("dirname"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dirname' field is not valid because:", - SourceLine(_doc, "dirname", str), - [e], - ) - ) - else: - dirname = None - if "nameroot" in _doc: - try: - nameroot = load_field( - _doc.get("nameroot"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'nameroot' field is not valid because:", - SourceLine(_doc, "nameroot", str), - [e], - ) - ) - else: - nameroot = None - if "nameext" in 
_doc: - try: - nameext = load_field( - _doc.get("nameext"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'nameext' field is not valid because:", - SourceLine(_doc, "nameext", str), - [e], - ) - ) - else: - nameext = None - if "checksum" in _doc: - try: - checksum = load_field( - _doc.get("checksum"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'checksum' field is not valid because:", - SourceLine(_doc, "checksum", str), - [e], - ) - ) - else: - checksum = None - if "size" in _doc: - try: - size = load_field( - _doc.get("size"), - union_of_None_type_or_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'size' field is not valid because:", - SourceLine(_doc, "size", str), - [e], - ) - ) - else: - size = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "contents" in _doc: - try: - contents = load_field( - _doc.get("contents"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - 
_errors__.append( - ValidationException( - "the 'contents' field is not valid because:", - SourceLine(_doc, "contents", str), - [e], - ) - ) - else: - contents = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'File'", None, _errors__) - _constructed = cls( - location=location, - path=path, - basename=basename, - dirname=dirname, - nameroot=nameroot, - nameext=nameext, - checksum=checksum, - size=size, - secondaryFiles=secondaryFiles, - format=format, - contents=contents, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "File" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, 
str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.location is not None and "location" not in r: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="location", - val=r.get("location"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.path is not None and "path" not in r: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="path", - val=r.get("path"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.basename is not None and "basename" not in r: - r["basename"] = save( - self.basename, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="basename", - val=r.get("basename"), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dirname is not None and "dirname" not in r: - r["dirname"] = save( - self.dirname, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dirname", - val=r.get("dirname"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.nameroot is not None and "nameroot" not in r: - r["nameroot"] = save( - self.nameroot, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="nameroot", - val=r.get("nameroot"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.nameext is not None and "nameext" not in r: - r["nameext"] = save( - self.nameext, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="nameext", - val=r.get("nameext"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.checksum is not None and "checksum" not in r: - r["checksum"] = save( - self.checksum, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="checksum", - val=r.get("checksum"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.size is not 
None and "size" not in r: - r["size"] = save( - self.size, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="size", - val=r.get("size"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, base_url, True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.contents is not None and "contents" not in r: - r["contents"] = save( - self.contents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="contents", - val=r.get("contents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = 
self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "location", - "path", - "basename", - "dirname", - "nameroot", - "nameext", - "checksum", - "size", - "secondaryFiles", - "format", - "contents", - ] - ) - - -class Directory(Saveable): - """ - Represents a directory to present to a command line tool. - - Directories are represented as objects with `class` of `Directory`. Directory objects have - a number of properties that provide metadata about the directory. - - The `location` property of a Directory is a IRI that uniquely identifies - the directory. Implementations must support the file:// IRI scheme and may - support other schemes such as http://. Alternately to `location`, - implementations must also accept the `path` property on Directory, which - must be a filesystem path available on the same host as the CWL runner (for - inputs) or the runtime environment of a command line tool execution (for - command line tool outputs). - - A Directory object may have a `listing` field. This is a list of File and - Directory objects that are contained in the Directory. For each entry in - `listing`, the `basename` property defines the name of the File or - Subdirectory when staged to disk. If `listing` is not provided, the - implementation must have some way of fetching the Directory listing at - runtime based on the `location` field. - - If a Directory does not have `location`, it is a Directory literal. A - Directory literal must provide `listing`. Directory literals must be - created on disk at runtime as needed. - - The resources in a Directory literal do not need to have any implied - relationship in their `location`. For example, a Directory listing may - contain two files located on different hosts. It is the responsibility of - the runtime to ensure that those files are staged to disk appropriately. - Secondary files associated with files in `listing` must also be staged to - the same Directory. 
- - When executing a CommandLineTool, Directories must be recursively staged - first and have local values of `path` assigned. - - Directory objects in CommandLineTool output must provide either a - `location` IRI or a `path` property in the context of the tool execution - runtime (local to the compute node, or within the executing container). - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - Name conflicts (the same `basename` appearing multiple times in `listing` - or in any entry in `secondaryFiles` in the listing) is a fatal error. - - """ - - def __init__( - self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - listing: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "Directory" - self.location = location - self.path = path - self.basename = basename - self.listing = listing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Directory): - return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.listing == other.listing - ) - return False - - def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Directory": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "Directory": - raise 
ValidationException("Not a Directory") - - if "location" in _doc: - try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'location' field is not valid because:", - SourceLine(_doc, "location", str), - [e], - ) - ) - else: - location = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'path' field is not valid because:", - SourceLine(_doc, "path", str), - [e], - ) - ) - else: - path = None - if "basename" in _doc: - try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'basename' field is not valid because:", - SourceLine(_doc, "basename", str), - [e], - ) - ) - else: - basename = None - if "listing" in _doc: - try: - listing = load_field( - _doc.get("listing"), - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'listing' field is not valid because:", - SourceLine(_doc, "listing", str), - [e], - ) - ) - else: - listing = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Directory'", None, 
_errors__) - _constructed = cls( - location=location, - path=path, - basename=basename, - listing=listing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "Directory" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.location is not None and "location" not 
in r: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="location", - val=r.get("location"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.path is not None and "path" not in r: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="path", - val=r.get("path"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.basename is not None and "basename" not in r: - r["basename"] = save( - self.basename, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="basename", - val=r.get("basename"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.listing is not None and "listing" not in r: - r["listing"] = save( - self.listing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="listing", - val=r.get("listing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "location", "path", "basename", "listing"]) - - -class 
Labeled(Saveable): - pass - - -class Identified(Saveable): - pass - - -class LoadContents(Saveable): - pass - - -class FieldBase(Labeled): - pass - - -class InputFormat(Saveable): - pass - - -class OutputFormat(Saveable): - pass - - -class Parameter(FieldBase, Documented, Identified): - """ - Define an input or output parameter to a process. - - """ - - pass - - -class InputBinding(Saveable): - def __init__( - self, - loadContents: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputBinding): - return bool(self.loadContents == other.loadContents) - return False - - def __hash__(self) -> int: - return hash((self.loadContents)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputBinding": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid 
field `{}`, expected one of: `loadContents`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputBinding'", None, _errors__) - _constructed = cls( - loadContents=loadContents, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, 
- inserted_line_info=inserted_line_info, - shift=shift - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["loadContents"]) - - -class IOSchema(Labeled, Documented): - pass - - -class InputSchema(IOSchema): - pass - - -class OutputSchema(IOSchema): - pass - - -class InputRecordField(RecordField, FieldBase, InputFormat, LoadContents): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordField): - 
return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - self.loadContents, - self.loadListing, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - 
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format 
= None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputRecordField'", None, _errors__) - _constructed = cls( - doc=doc, - name=name, - type=type, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - loadContents=loadContents, - loadListing=loadListing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - 
doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) 
- max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - 
line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "loadContents", - "loadListing", - ] - ) - - -class 
InputRecordSchema(RecordSchema, InputSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - 
idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputRecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return 
_constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), 
- cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if 
self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "doc", "name"]) - - -class InputEnumSchema(EnumSchema, InputSchema): - def __init__( - self, - symbols: Any, - type: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.symbols = symbols - self.type = type - self.label = label - self.doc = doc - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputEnumSchema): - return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - ) - return False - - def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type, self.label, self.doc)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot 
- else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputEnumSchema'", None, _errors__) - _constructed = cls( - name=name, - symbols=symbols, - type=type, - label=label, - doc=doc, - extension_fields=extension_fields, - 
loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, 
inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) - - -class InputArraySchema(ArraySchema, InputSchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is 
None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( - k - ), - 
SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'InputArraySchema'", None, _errors__) - _constructed = cls( - items=items, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, 
- min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.items is not None and "items" not in r: - u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) - r["items"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, 
inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class OutputRecordField(RecordField, FieldBase, OutputFormat): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> 
"OutputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - 
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputRecordField'", None, _errors__) - _constructed = cls( - doc=doc, - name=name, - type=type, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - 
base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - 
r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] - ) - - -class OutputRecordSchema(RecordSchema, OutputSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and 
self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", 
str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputRecordSchema'", None, _errors__) - _constructed = cls( - fields=fields, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in 
doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, 
- line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "doc", "name"]) - - -class OutputEnumSchema(EnumSchema, OutputSchema): - def __init__( - self, - symbols: Any, - type: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.symbols = symbols - self.type = type - self.label = 
label - self.doc = doc - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputEnumSchema): - return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - ) - return False - - def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type, self.label, self.doc)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - 
_errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputEnumSchema'", None, _errors__) - _constructed = cls( - name=name, - symbols=symbols, - type=type, - label=label, - doc=doc, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - 
else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, 
inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) - - -class OutputArraySchema(ArraySchema, OutputSchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = 
items - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OutputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( 
- "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'OutputArraySchema'", None, _errors__) - _constructed = cls( - items=items, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - 
line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.items is not None and "items" not in r: - u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) - r["items"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class InputParameter(Parameter, InputFormat, LoadContents): - pass - - -class OutputParameter(Parameter, OutputFormat): - pass - - -class ProcessRequirement(Saveable): - """ - A process requirement declares a prerequisite that may or must be fulfilled - before executing a process. 
See [`Process.hints`](#process) and - [`Process.requirements`](#process). - - Process requirements are the primary mechanism for specifying extensions to - the CWL core specification. - - """ - - pass - - -class Process(Identified, Labeled, Documented): - """ - - The base executable type in CWL is the `Process` object defined by the - document. Note that the `Process` object is abstract and cannot be - directly executed. - - """ - - pass - - -class InlineJavascriptRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support inline Javascript expressions. - If this requirement is not present, the workflow platform must not perform expression - interpolation. - - """ - - def __init__( - self, - expressionLib: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InlineJavascriptRequirement" - self.expressionLib = expressionLib - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InlineJavascriptRequirement): - return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.expressionLib)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InlineJavascriptRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "InlineJavascriptRequirement": - raise ValidationException("Not a InlineJavascriptRequirement") - - if "expressionLib" in _doc: - try: - expressionLib = load_field( - 
_doc.get("expressionLib"), - union_of_None_type_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'expressionLib' field is not valid because:", - SourceLine(_doc, "expressionLib", str), - [e], - ) - ) - else: - expressionLib = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `expressionLib`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'InlineJavascriptRequirement'", None, _errors__ - ) - _constructed = cls( - expressionLib=expressionLib, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "InlineJavascriptRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - 
line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.expressionLib is not None and "expressionLib" not in r: - r["expressionLib"] = save( - self.expressionLib, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="expressionLib", - val=r.get("expressionLib"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "expressionLib"]) - - -class CommandInputSchema(Saveable): - pass - - -class SchemaDefRequirement(ProcessRequirement): - """ - This field consists of an array of type definitions which must be used when - interpreting the `inputs` and `outputs` fields. When a `type` field - contains a IRI, the implementation must check if the type is defined in - `schemaDefs` and use that definition. If the type is not found in - `schemaDefs`, it is an error. 
The entries in `schemaDefs` must be - processed in the order listed such that later schema definitions may refer - to earlier schema definitions. - - - **Type definitions are allowed for `enum` and `record` types only.** - - Type definitions may be shared by defining them in a file and then - `$include`-ing them in the `types` field. - - A file can contain a list of type definitions - - """ - - def __init__( - self, - types: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SchemaDefRequirement" - self.types = types - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SchemaDefRequirement): - return bool(self.class_ == other.class_ and self.types == other.types) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.types)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SchemaDefRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SchemaDefRequirement": - raise ValidationException("Not a SchemaDefRequirement") - - try: - types = load_field( - _doc.get("types"), - array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'types' field is not valid because:", - SourceLine(_doc, "types", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", 
loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SchemaDefRequirement'", None, _errors__) - _constructed = cls( - types=types, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SchemaDefRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = 
saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.types is not None and "types" not in r: - r["types"] = save( - self.types, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="types", - val=r.get("types"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "types"]) - - -class SecondaryFileSchema(Saveable): - """ - Secondary files are specified using the following micro-DSL for secondary files: - - * If the value is a string, it is transformed to an object with two fields - `pattern` and `required` - * By default, the value of `required` is `null` - (this indicates default behavior, which may be based on the context) - * If the value ends with a question mark `?` the question mark is - stripped off and the value of the field `required` is set to `False` - * The remaining value is assigned to the field `pattern` - - For implementation details and examples, please see - [this section](SchemaSalad.html#Domain_Specific_Language_for_secondary_files) - in the Schema Salad specification. 
- - """ - - def __init__( - self, - pattern: Any, - required: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.pattern = pattern - self.required = required - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SecondaryFileSchema): - return bool( - self.pattern == other.pattern and self.required == other.required - ) - return False - - def __hash__(self) -> int: - return hash((self.pattern, self.required)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SecondaryFileSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - pattern = load_field( - _doc.get("pattern"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'pattern' field is not valid because:", - SourceLine(_doc, "pattern", str), - [e], - ) - ) - if "required" in _doc: - try: - required = load_field( - _doc.get("required"), - union_of_None_type_or_booltype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'required' field is not valid because:", - SourceLine(_doc, "required", str), - [e], - ) - ) - else: - required = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid 
field `{}`, expected one of: `pattern`, `required`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SecondaryFileSchema'", None, _errors__) - _constructed = cls( - pattern=pattern, - required=required, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.pattern is not None and "pattern" not in r: - r["pattern"] = save( - self.pattern, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="pattern", - val=r.get("pattern"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.required is not None and "required" not in r: - r["required"] = save( - self.required, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="required", - val=r.get("required"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["pattern", "required"]) - - -class LoadListingRequirement(ProcessRequirement): - """ - Specify the desired behavior for loading the `listing` field of - a Directory object for use by expressions. 
- - """ - - def __init__( - self, - loadListing: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "LoadListingRequirement" - self.loadListing = loadListing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, LoadListingRequirement): - return bool( - self.class_ == other.class_ and self.loadListing == other.loadListing - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.loadListing)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "LoadListingRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "LoadListingRequirement": - raise ValidationException("Not a LoadListingRequirement") - - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `loadListing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise 
ValidationException( - "Trying 'LoadListingRequirement'", None, _errors__ - ) - _constructed = cls( - loadListing=loadListing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "LoadListingRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if 
self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "loadListing"]) - - -class EnvironmentDef(Saveable): - """ - Define an environment variable that will be set in the runtime environment - by the workflow platform when executing the command line tool. May be the - result of executing an expression, such as getting a parameter from input. 
- - """ - - def __init__( - self, - envName: Any, - envValue: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.envName = envName - self.envValue = envValue - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvironmentDef): - return bool( - self.envName == other.envName and self.envValue == other.envValue - ) - return False - - def __hash__(self) -> int: - return hash((self.envName, self.envValue)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "EnvironmentDef": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - envName = load_field( - _doc.get("envName"), - strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'envName' field is not valid because:", - SourceLine(_doc, "envName", str), - [e], - ) - ) - try: - envValue = load_field( - _doc.get("envValue"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'envValue' field is not valid because:", - SourceLine(_doc, "envValue", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if 
_errors__: - raise ValidationException("Trying 'EnvironmentDef'", None, _errors__) - _constructed = cls( - envName=envName, - envValue=envValue, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.envName is not None and 
"envName" not in r: - r["envName"] = save( - self.envName, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="envName", - val=r.get("envName"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.envValue is not None and "envValue" not in r: - r["envValue"] = save( - self.envValue, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="envValue", - val=r.get("envValue"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["envName", "envValue"]) - - -class CommandLineBinding(InputBinding): - """ - - When listed under `inputBinding` in the input schema, the term - "value" refers to the corresponding value in the input object. For - binding objects listed in `CommandLineTool.arguments`, the term "value" - refers to the effective value after evaluating `valueFrom`. - - The binding behavior when building the command line depends on the data - type of the value. If there is a mismatch between the type described by - the input schema and the effective value, such as resulting from an - expression evaluation, an implementation must use the data type of the - effective value. - - - **string**: Add `prefix` and the string to the command line. - - - **number**: Add `prefix` and decimal representation to command line. 
- - - **boolean**: If true, add `prefix` to the command line. If false, add - nothing. - - - **File**: Add `prefix` and the value of - [`File.path`](#File) to the command line. - - - **Directory**: Add `prefix` and the value of - [`Directory.path`](#Directory) to the command line. - - - **array**: If `itemSeparator` is specified, add `prefix` and the join - the array into a single string with `itemSeparator` separating the - items. Otherwise, first add `prefix`, then recursively process - individual elements. - If the array is empty, it does not add anything to command line. - - - **object**: Add `prefix` only, and recursively add object fields for - which `inputBinding` is specified. - - - **null**: Add nothing. - - """ - - def __init__( - self, - loadContents: Optional[Any] = None, - position: Optional[Any] = None, - prefix: Optional[Any] = None, - separate: Optional[Any] = None, - itemSeparator: Optional[Any] = None, - valueFrom: Optional[Any] = None, - shellQuote: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - self.position = position - self.prefix = prefix - self.separate = separate - self.itemSeparator = itemSeparator - self.valueFrom = valueFrom - self.shellQuote = shellQuote - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineBinding): - return bool( - self.loadContents == other.loadContents - and self.position == other.position - and self.prefix == other.prefix - and self.separate == other.separate - and self.itemSeparator == other.itemSeparator - and self.valueFrom == other.valueFrom - and self.shellQuote == other.shellQuote - ) - return False - - def __hash__(self) -> int: - return 
hash( - ( - self.loadContents, - self.position, - self.prefix, - self.separate, - self.itemSeparator, - self.valueFrom, - self.shellQuote, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandLineBinding": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "position" in _doc: - try: - position = load_field( - _doc.get("position"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'position' field is not valid because:", - SourceLine(_doc, "position", str), - [e], - ) - ) - else: - position = None - if "prefix" in _doc: - try: - prefix = load_field( - _doc.get("prefix"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'prefix' field is not valid because:", - SourceLine(_doc, "prefix", str), - [e], - ) - ) - else: - prefix = None - if "separate" in _doc: - try: - separate = load_field( - _doc.get("separate"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'separate' field is not valid because:", - SourceLine(_doc, "separate", str), - [e], - ) - ) - else: - separate = None - if "itemSeparator" in _doc: - try: - itemSeparator = load_field( - _doc.get("itemSeparator"), - 
union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'itemSeparator' field is not valid because:", - SourceLine(_doc, "itemSeparator", str), - [e], - ) - ) - else: - itemSeparator = None - if "valueFrom" in _doc: - try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'valueFrom' field is not valid because:", - SourceLine(_doc, "valueFrom", str), - [e], - ) - ) - else: - valueFrom = None - if "shellQuote" in _doc: - try: - shellQuote = load_field( - _doc.get("shellQuote"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'shellQuote' field is not valid because:", - SourceLine(_doc, "shellQuote", str), - [e], - ) - ) - else: - shellQuote = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandLineBinding'", None, _errors__) - _constructed = cls( - loadContents=loadContents, - position=position, - prefix=prefix, - separate=separate, - itemSeparator=itemSeparator, - valueFrom=valueFrom, - shellQuote=shellQuote, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, 
- inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.position is not None and "position" not in r: - r["position"] = save( - self.position, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="position", - val=r.get("position"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.prefix is not None and "prefix" not in r: - r["prefix"] = save( - self.prefix, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="prefix", - val=r.get("prefix"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.separate is not None and "separate" not in r: - r["separate"] = save( - self.separate, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="separate", - val=r.get("separate"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.itemSeparator is not None and "itemSeparator" not in r: - r["itemSeparator"] = save( - self.itemSeparator, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="itemSeparator", - val=r.get("itemSeparator"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.valueFrom is not None and "valueFrom" not in r: - r["valueFrom"] = save( - self.valueFrom, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="valueFrom", - val=r.get("valueFrom"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.shellQuote is not None and "shellQuote" not in r: - r["shellQuote"] = save( - self.shellQuote, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="shellQuote", - val=r.get("shellQuote"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "loadContents", - "position", - "prefix", - "separate", - "itemSeparator", - "valueFrom", - "shellQuote", - ] - ) - - -class CommandOutputBinding(LoadContents): - """ - Describes how to generate an output parameter based on the files produced - by a CommandLineTool. 
- - The output parameter value is generated by applying these operations in the - following order: - - - glob - - loadContents - - outputEval - - secondaryFiles - - """ - - def __init__( - self, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - glob: Optional[Any] = None, - outputEval: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - self.loadListing = loadListing - self.glob = glob - self.outputEval = outputEval - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputBinding): - return bool( - self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.glob == other.glob - and self.outputEval == other.outputEval - ) - return False - - def __hash__(self) -> int: - return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputBinding": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, 
- baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - if "glob" in _doc: - try: - glob = load_field( - _doc.get("glob"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'glob' field is not valid because:", - SourceLine(_doc, "glob", str), - [e], - ) - ) - else: - glob = None - if "outputEval" in _doc: - try: - outputEval = load_field( - _doc.get("outputEval"), - union_of_None_type_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputEval' field is not valid because:", - SourceLine(_doc, "outputEval", str), - [e], - ) - ) - else: - outputEval = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandOutputBinding'", None, _errors__) - _constructed = cls( - loadContents=loadContents, - loadListing=loadListing, - glob=glob, - outputEval=outputEval, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys 
= copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - 
r["loadListing"] = save( - self.loadListing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.glob is not None and "glob" not in r: - r["glob"] = save( - self.glob, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="glob", - val=r.get("glob"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputEval is not None and "outputEval" not in r: - r["outputEval"] = save( - self.outputEval, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputEval", - val=r.get("outputEval"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"]) - - -class CommandLineBindable(Saveable): - def __init__( - self, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = 
CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineBindable): - return bool(self.inputBinding == other.inputBinding) - return False - - def __hash__(self) -> int: - return hash((self.inputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandLineBindable": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandLineBindable'", None, _errors__) - _constructed = cls( - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) 
- - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if 
self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["inputBinding"]) - - -class CommandInputRecordField(InputRecordField, CommandLineBindable): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - self.loadContents, - self.loadListing, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( 
- cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - 
else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = 
None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordField'", None, _errors__ - ) - _constructed = cls( - doc=doc, - name=name, - type=type, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - loadContents=loadContents, - loadListing=loadListing, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if 
relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - 
self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) - 
r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - 
attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "loadContents", - "loadListing", - "inputBinding", - ] - ) - - -class CommandInputRecordSchema( - InputRecordSchema, CommandInputSchema, CommandLineBindable -): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.doc = doc - self.name = name - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, 
"name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - 
extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputRecordSchema'", None, _errors__ - ) - _constructed = cls( - fields=fields, - type=type, - label=label, - doc=doc, - name=name, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, 
- inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) - - -class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): - def __init__( - self, - symbols: Any, - type: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - 
self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.symbols = symbols - self.type = type - self.label = label - self.doc = doc - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputEnumSchema): - return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.name, - self.symbols, - self.type, - self.label, - self.doc, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - 
_doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputEnumSchema'", None, _errors__ - ) - _constructed = cls( - name=name, - symbols=symbols, - type=type, - label=label, - doc=doc, - inputBinding=inputBinding, - 
extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - 
r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "symbols", "type", "label", "doc", "inputBinding"]) - - -class CommandInputArraySchema( - InputArraySchema, CommandInputSchema, CommandLineBindable -): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.doc = doc - self.name = name - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - 
(self.items, self.type, self.label, self.doc, self.name, self.inputBinding) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field 
is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandInputArraySchema'", None, _errors__ - ) - _constructed = cls( - items=items, - type=type, - label=label, - doc=doc, - name=name, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info 
is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.items is not None and "items" not in r: - u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) - r["items"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - 
val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) 
- - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) - - -class CommandOutputRecordField(OutputRecordField): - def __init__( - self, - name: Any, - type: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name - self.type = type - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type == other.type - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - self.outputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputRecordField": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - 
_doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - raise ValidationException("Missing name") - if not __original_name_is_none: - baseuri = name - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - 
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise 
ValidationException( - "Trying 'CommandOutputRecordField'", None, _errors__ - ) - _constructed = cls( - doc=doc, - name=name, - type=type, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - 
key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - 
self.secondaryFiles, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri( - self.format, str(self.name), True, None, relative_uris - ) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - 
r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "outputBinding", - ] - ) - - -class CommandOutputRecordSchema(OutputRecordSchema): - def __init__( - self, - type: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordSchema): - return bool( - self.fields == other.fields - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputRecordSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: 
- name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'fields' field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - ) - ) - else: - fields = None - try: - type = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputRecordSchema'", None, _errors__ - ) - _constructed = cls( - fields=fields, - type=type, 
- label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, 
base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.fields is not None and "fields" not in r: - r["fields"] = save( - self.fields, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="fields", - val=r.get("fields"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - 
val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "doc", "name"]) - - -class CommandOutputEnumSchema(OutputEnumSchema): - def __init__( - self, - symbols: Any, - type: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name - self.symbols = symbols - self.type = type - self.label = label - self.doc = doc - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputEnumSchema): - return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - ) - return False - - def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type, self.label, self.doc)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputEnumSchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' 
field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'symbols' field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise 
ValidationException( - "Trying 'CommandOutputEnumSchema'", None, _errors__ - ) - _constructed = cls( - name=name, - symbols=symbols, - type=type, - label=label, - doc=doc, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.symbols is not None and "symbols" not in r: - u = save_relative_uri( - self.symbols, str(self.name), True, None, relative_uris - ) - r["symbols"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="symbols", - val=r.get("symbols"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info 
= add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) - - -class CommandOutputArraySchema(OutputArraySchema): - def __init__( - self, - items: Any, - type: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type = type - self.label = label - self.doc = doc - self.name = name - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputArraySchema): - return bool( - self.items == other.items - and self.type == other.type - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputArraySchema": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except 
ValidationException as e: - _errors__.append( - ValidationException( - "the 'name' field is not valid because:", - SourceLine(_doc, "name", str), - [e], - ) - ) - else: - name = None - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = name - try: - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'items' field is not valid because:", - SourceLine(_doc, "items", str), - [e], - ) - ) - try: - type = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in 
k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputArraySchema'", None, _errors__ - ) - _constructed = cls( - items=items, - type=type, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[name] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, 
just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.name is not None and "name" not in r: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="name", - val=r.get("name"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.items is not None and "items" not in r: - u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) - r["items"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="items", - val=r.get("items"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.name), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class CommandInputParameter(InputParameter): - """ - An input parameter for a CommandLineTool. - """ - - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default - self.type = type - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if 
isinstance(other, CommandInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default - and self.type == other.type - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.loadContents, - self.loadListing, - self.default, - self.type, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandInputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - 
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - 
loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, 
`inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandInputParameter'", None, _errors__) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, - type=type, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - 
saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None 
and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - 
"label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "loadContents", - "loadListing", - "default", - "type", - "inputBinding", - ] - ) - - -class CommandOutputParameter(OutputParameter): - """ - An output parameter for a CommandLineTool. - """ - - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.type = type - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type == other.type - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.type, - self.outputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in 
_doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - 
doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputBinding' field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - ) - ) - else: - outputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'CommandOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - 
secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - type=type, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if 
type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - 
line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputBinding is not None and "outputBinding" not in r: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputBinding", - val=r.get("outputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top 
refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "type", - "outputBinding", - ] - ) - - -class CommandLineTool(Process): - """ - This defines the schema of the CWL Command Line Tool Description document. - - """ - - def __init__( - self, - inputs: Any, - outputs: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - baseCommand: Optional[Any] = None, - arguments: Optional[Any] = None, - stdin: Optional[Any] = None, - stderr: Optional[Any] = None, - stdout: Optional[Any] = None, - successCodes: Optional[Any] = None, - temporaryFailCodes: Optional[Any] = None, - permanentFailCodes: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.intent = intent - self.class_ = "CommandLineTool" - self.baseCommand = baseCommand - self.arguments = arguments - self.stdin = stdin - self.stderr = stderr - self.stdout = stdout - self.successCodes = successCodes - self.temporaryFailCodes = temporaryFailCodes - self.permanentFailCodes = permanentFailCodes - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineTool): - 
return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.intent == other.intent - and self.class_ == other.class_ - and self.baseCommand == other.baseCommand - and self.arguments == other.arguments - and self.stdin == other.stdin - and self.stderr == other.stderr - and self.stdout == other.stdout - and self.successCodes == other.successCodes - and self.temporaryFailCodes == other.temporaryFailCodes - and self.permanentFailCodes == other.permanentFailCodes - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.intent, - self.class_, - self.baseCommand, - self.arguments, - self.stdin, - self.stderr, - self.stdout, - self.successCodes, - self.temporaryFailCodes, - self.permanentFailCodes, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "CommandLineTool": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "CommandLineTool": - raise ValidationException("Not a CommandLineTool") - - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = 
id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_CommandInputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputs' field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - ) - ) - try: - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_CommandOutputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputs' field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - 
baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'cwlVersion' field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - ) - ) - else: - cwlVersion = None - if "intent" in _doc: - try: - intent = load_field( - _doc.get("intent"), - uri_union_of_None_type_or_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'intent' field is not valid because:", - SourceLine(_doc, "intent", str), - [e], - ) - ) - else: - intent = None - if "baseCommand" in _doc: - try: - baseCommand = load_field( - _doc.get("baseCommand"), - 
union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'baseCommand' field is not valid because:", - SourceLine(_doc, "baseCommand", str), - [e], - ) - ) - else: - baseCommand = None - if "arguments" in _doc: - try: - arguments = load_field( - _doc.get("arguments"), - union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'arguments' field is not valid because:", - SourceLine(_doc, "arguments", str), - [e], - ) - ) - else: - arguments = None - if "stdin" in _doc: - try: - stdin = load_field( - _doc.get("stdin"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'stdin' field is not valid because:", - SourceLine(_doc, "stdin", str), - [e], - ) - ) - else: - stdin = None - if "stderr" in _doc: - try: - stderr = load_field( - _doc.get("stderr"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'stderr' field is not valid because:", - SourceLine(_doc, "stderr", str), - [e], - ) - ) - else: - stderr = None - if "stdout" in _doc: - try: - stdout = load_field( - _doc.get("stdout"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'stdout' field is not valid because:", - SourceLine(_doc, "stdout", str), - [e], - ) - ) - else: - stdout = None - if "successCodes" in _doc: - try: - successCodes = load_field( - _doc.get("successCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - 
_errors__.append( - ValidationException( - "the 'successCodes' field is not valid because:", - SourceLine(_doc, "successCodes", str), - [e], - ) - ) - else: - successCodes = None - if "temporaryFailCodes" in _doc: - try: - temporaryFailCodes = load_field( - _doc.get("temporaryFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'temporaryFailCodes' field is not valid because:", - SourceLine(_doc, "temporaryFailCodes", str), - [e], - ) - ) - else: - temporaryFailCodes = None - if "permanentFailCodes" in _doc: - try: - permanentFailCodes = load_field( - _doc.get("permanentFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'permanentFailCodes' field is not valid because:", - SourceLine(_doc, "permanentFailCodes", str), - [e], - ) - ) - else: - permanentFailCodes = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'CommandLineTool'", None, _errors__) - _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - intent=intent, - baseCommand=baseCommand, - arguments=arguments, - stdin=stdin, - stderr=stderr, - stdout=stdout, - 
successCodes=successCodes, - temporaryFailCodes=temporaryFailCodes, - permanentFailCodes=permanentFailCodes, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "CommandLineTool" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if 
type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - 
min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.intent is not None and "intent" not 
in r: - u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) - r["intent"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="intent", - val=r.get("intent"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.baseCommand is not None and "baseCommand" not in r: - r["baseCommand"] = save( - self.baseCommand, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="baseCommand", - val=r.get("baseCommand"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.arguments is not None and "arguments" not in r: - r["arguments"] = save( - self.arguments, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="arguments", - val=r.get("arguments"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.stdin is not None and "stdin" not in r: - r["stdin"] = save( - self.stdin, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stdin", - val=r.get("stdin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.stderr is not None and "stderr" not in r: - r["stderr"] = save( - self.stderr, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, 
- ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stderr", - val=r.get("stderr"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.stdout is not None and "stdout" not in r: - r["stdout"] = save( - self.stdout, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="stdout", - val=r.get("stdout"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.successCodes is not None and "successCodes" not in r: - r["successCodes"] = save( - self.successCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="successCodes", - val=r.get("successCodes"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: - r["temporaryFailCodes"] = save( - self.temporaryFailCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="temporaryFailCodes", - val=r.get("temporaryFailCodes"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.permanentFailCodes is not None and "permanentFailCodes" not in r: - r["permanentFailCodes"] = save( - self.permanentFailCodes, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="permanentFailCodes", - val=r.get("permanentFailCodes"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "intent", - "class", - "baseCommand", - "arguments", - "stdin", - "stderr", - "stdout", - "successCodes", - "temporaryFailCodes", - "permanentFailCodes", - ] - ) - - -class DockerRequirement(ProcessRequirement): - """ - Indicates that a workflow component should be run in a - [Docker](https://docker.com) or Docker-compatible (such as - [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and - specifies how to fetch or build the image. - - If a CommandLineTool lists `DockerRequirement` under - `hints` (or `requirements`), it may (or must) be run in the specified Docker - container. - - The platform must first acquire or install the correct Docker image as - specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. - - The platform must execute the tool in the container using `docker run` with - the appropriate Docker image and tool command line. - - The workflow platform may provide input files and the designated output - directory through the use of volume bind mounts. The platform should rewrite - file paths in the input object to correspond to the Docker bind mounted - locations. 
That is, the platform should rewrite values in the parameter context - such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths - within the container. The platform must ensure that `runtime.outdir` and - `runtime.tmpdir` are distinct directories. - - When running a tool contained in Docker, the workflow platform must not - assume anything about the contents of the Docker container, such as the - presence or absence of specific software, except to assume that the - generated command line represents a valid command within the runtime - environment of the container. - - A container image may specify an - [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) - and/or - [CMD](https://docs.docker.com/engine/reference/builder/#cmd). - Command line arguments will be appended after all elements of - ENTRYPOINT, and will override all elements specified using CMD (in - other words, CMD is only used when the CommandLineTool definition - produces an empty command line). - - Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility - concerns of the implicit hidden execution point (For further discussion, see - [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). Portable - CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. - CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the - `requirements` section. - - ## Interaction with other requirements - - If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a - DockerRequirement, the environment variables must be provided to Docker - using `--env` or `--env-file` and interact with the container's preexisting - environment as defined by Docker. 
- - """ - - def __init__( - self, - dockerPull: Optional[Any] = None, - dockerLoad: Optional[Any] = None, - dockerFile: Optional[Any] = None, - dockerImport: Optional[Any] = None, - dockerImageId: Optional[Any] = None, - dockerOutputDirectory: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "DockerRequirement" - self.dockerPull = dockerPull - self.dockerLoad = dockerLoad - self.dockerFile = dockerFile - self.dockerImport = dockerImport - self.dockerImageId = dockerImageId - self.dockerOutputDirectory = dockerOutputDirectory - - def __eq__(self, other: Any) -> bool: - if isinstance(other, DockerRequirement): - return bool( - self.class_ == other.class_ - and self.dockerPull == other.dockerPull - and self.dockerLoad == other.dockerLoad - and self.dockerFile == other.dockerFile - and self.dockerImport == other.dockerImport - and self.dockerImageId == other.dockerImageId - and self.dockerOutputDirectory == other.dockerOutputDirectory - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.dockerPull, - self.dockerLoad, - self.dockerFile, - self.dockerImport, - self.dockerImageId, - self.dockerOutputDirectory, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "DockerRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "DockerRequirement": - raise ValidationException("Not a DockerRequirement") - - if "dockerPull" in _doc: - try: - dockerPull = load_field( - _doc.get("dockerPull"), - 
union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerPull' field is not valid because:", - SourceLine(_doc, "dockerPull", str), - [e], - ) - ) - else: - dockerPull = None - if "dockerLoad" in _doc: - try: - dockerLoad = load_field( - _doc.get("dockerLoad"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerLoad' field is not valid because:", - SourceLine(_doc, "dockerLoad", str), - [e], - ) - ) - else: - dockerLoad = None - if "dockerFile" in _doc: - try: - dockerFile = load_field( - _doc.get("dockerFile"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerFile' field is not valid because:", - SourceLine(_doc, "dockerFile", str), - [e], - ) - ) - else: - dockerFile = None - if "dockerImport" in _doc: - try: - dockerImport = load_field( - _doc.get("dockerImport"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerImport' field is not valid because:", - SourceLine(_doc, "dockerImport", str), - [e], - ) - ) - else: - dockerImport = None - if "dockerImageId" in _doc: - try: - dockerImageId = load_field( - _doc.get("dockerImageId"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'dockerImageId' field is not valid because:", - SourceLine(_doc, "dockerImageId", str), - [e], - ) - ) - else: - dockerImageId = None - if "dockerOutputDirectory" in _doc: - try: - dockerOutputDirectory = load_field( - _doc.get("dockerOutputDirectory"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - 
ValidationException( - "the 'dockerOutputDirectory' field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), - [e], - ) - ) - else: - dockerOutputDirectory = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'DockerRequirement'", None, _errors__) - _constructed = cls( - dockerPull=dockerPull, - dockerLoad=dockerLoad, - dockerFile=dockerFile, - dockerImport=dockerImport, - dockerImageId=dockerImageId, - dockerOutputDirectory=dockerOutputDirectory, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "DockerRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if 
hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.dockerPull is not None and "dockerPull" not in r: - r["dockerPull"] = save( - self.dockerPull, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerPull", - val=r.get("dockerPull"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerLoad is not None and "dockerLoad" not in r: - r["dockerLoad"] = save( - self.dockerLoad, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerLoad", - val=r.get("dockerLoad"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerFile is not None and "dockerFile" not in r: - r["dockerFile"] = save( - self.dockerFile, - top=False, - base_url=base_url, - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerFile", - val=r.get("dockerFile"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerImport is not None and "dockerImport" not in r: - r["dockerImport"] = save( - self.dockerImport, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerImport", - val=r.get("dockerImport"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerImageId is not None and "dockerImageId" not in r: - r["dockerImageId"] = save( - self.dockerImageId, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerImageId", - val=r.get("dockerImageId"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: - r["dockerOutputDirectory"] = save( - self.dockerOutputDirectory, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="dockerOutputDirectory", - val=r.get("dockerOutputDirectory"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = 
self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "dockerPull", - "dockerLoad", - "dockerFile", - "dockerImport", - "dockerImageId", - "dockerOutputDirectory", - ] - ) - - -class SoftwareRequirement(ProcessRequirement): - """ - A list of software packages that should be configured in the environment of - the defined process. - - """ - - def __init__( - self, - packages: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SoftwareRequirement" - self.packages = packages - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwareRequirement): - return bool(self.class_ == other.class_ and self.packages == other.packages) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.packages)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SoftwareRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SoftwareRequirement": - raise ValidationException("Not a SoftwareRequirement") - - try: - packages = load_field( - _doc.get("packages"), - idmap_packages_array_of_SoftwarePackageLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'packages' field is not valid because:", - SourceLine(_doc, "packages", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( 
- k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SoftwareRequirement'", None, _errors__) - _constructed = cls( - packages=packages, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SoftwareRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] 
- - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.packages is not None and "packages" not in r: - r["packages"] = save( - self.packages, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="packages", - val=r.get("packages"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "packages"]) - - -class SoftwarePackage(Saveable): - def __init__( - self, - package: Any, - version: Optional[Any] = None, - specs: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.package = package - self.version = version - self.specs = specs - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwarePackage): - return bool( - self.package == other.package - and self.version == other.version - and self.specs == other.specs - ) - return False - - def __hash__(self) -> int: - return hash((self.package, self.version, self.specs)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = 
None, - ) -> "SoftwarePackage": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - package = load_field( - _doc.get("package"), - strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'package' field is not valid because:", - SourceLine(_doc, "package", str), - [e], - ) - ) - if "version" in _doc: - try: - version = load_field( - _doc.get("version"), - union_of_None_type_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'version' field is not valid because:", - SourceLine(_doc, "version", str), - [e], - ) - ) - else: - version = None - if "specs" in _doc: - try: - specs = load_field( - _doc.get("specs"), - uri_union_of_None_type_or_array_of_strtype_False_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'specs' field is not valid because:", - SourceLine(_doc, "specs", str), - [e], - ) - ) - else: - specs = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'SoftwarePackage'", None, _errors__) - _constructed = cls( - package=package, - version=version, - specs=specs, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = 
None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.package is not None and "package" not in r: - r["package"] = save( - self.package, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="package", - val=r.get("package"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.version is not None and "version" not in r: - r["version"] = save( - self.version, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="version", - val=r.get("version"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.specs is not None and "specs" not in r: - u = save_relative_uri(self.specs, base_url, False, None, relative_uris) - r["specs"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="specs", - val=r.get("specs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["package", "version", "specs"]) - - -class Dirent(Saveable): - """ - Define a file or subdirectory that must be staged to a particular - place prior to executing the command line tool. May be the result - of executing an expression, such as building a configuration file - from a template. - - Usually files are staged within the [designated output directory](#Runtime_environment). - However, under certain circumstances, files may be staged at - arbitrary locations, see discussion for `entryname`. 
- - """ - - def __init__( - self, - entry: Any, - entryname: Optional[Any] = None, - writable: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.entryname = entryname - self.entry = entry - self.writable = writable - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Dirent): - return bool( - self.entryname == other.entryname - and self.entry == other.entry - and self.writable == other.writable - ) - return False - - def __hash__(self) -> int: - return hash((self.entryname, self.entry, self.writable)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Dirent": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "entryname" in _doc: - try: - entryname = load_field( - _doc.get("entryname"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'entryname' field is not valid because:", - SourceLine(_doc, "entryname", str), - [e], - ) - ) - else: - entryname = None - try: - entry = load_field( - _doc.get("entry"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'entry' field is not valid because:", - SourceLine(_doc, "entry", str), - [e], - ) - ) - if "writable" in _doc: - try: - writable = load_field( - _doc.get("writable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( 
- ValidationException( - "the 'writable' field is not valid because:", - SourceLine(_doc, "writable", str), - [e], - ) - ) - else: - writable = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Dirent'", None, _errors__) - _constructed = cls( - entryname=entryname, - entry=entry, - writable=writable, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.entryname is not None and "entryname" not in r: - r["entryname"] = save( - self.entryname, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="entryname", - val=r.get("entryname"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.entry is not None and "entry" not in r: - r["entry"] = save( - self.entry, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="entry", - val=r.get("entry"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.writable is not None and "writable" not in r: - r["writable"] = save( - self.writable, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="writable", - val=r.get("writable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: 
- if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["entryname", "entry", "writable"]) - - -class InitialWorkDirRequirement(ProcessRequirement): - """ - Define a list of files and subdirectories that must be staged by the workflow platform prior to executing the command line tool. - Normally files are staged within the designated output directory. However, when running inside containers, files may be staged at arbitrary locations, see discussion for [`Dirent.entryname`](#Dirent). Together with `DockerRequirement.dockerOutputDirectory` it is possible to control the locations of both input and output files when running in containers. - """ - - def __init__( - self, - listing: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InitialWorkDirRequirement" - self.listing = listing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InitialWorkDirRequirement): - return bool(self.class_ == other.class_ and self.listing == other.listing) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.listing)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InitialWorkDirRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "InitialWorkDirRequirement": - raise ValidationException("Not a InitialWorkDirRequirement") - - try: - listing = load_field( - _doc.get("listing"), - 
union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'listing' field is not valid because:", - SourceLine(_doc, "listing", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'InitialWorkDirRequirement'", None, _errors__ - ) - _constructed = cls( - listing=listing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "InitialWorkDirRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if 
getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.listing is not None and "listing" not in r: - r["listing"] = save( - self.listing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="listing", - val=r.get("listing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "listing"]) - - -class EnvVarRequirement(ProcessRequirement): - """ - Define a list of environment variables which will be set in the - execution environment of the tool. See `EnvironmentDef` for details. 
- - """ - - def __init__( - self, - envDef: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "EnvVarRequirement" - self.envDef = envDef - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvVarRequirement): - return bool(self.class_ == other.class_ and self.envDef == other.envDef) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.envDef)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "EnvVarRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "EnvVarRequirement": - raise ValidationException("Not a EnvVarRequirement") - - try: - envDef = load_field( - _doc.get("envDef"), - idmap_envDef_array_of_EnvironmentDefLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'envDef' field is not valid because:", - SourceLine(_doc, "envDef", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'EnvVarRequirement'", None, _errors__) - _constructed = cls( - envDef=envDef, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) 
- return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "EnvVarRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.envDef is not None and "envDef" not in r: - r["envDef"] = save( - self.envDef, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - 
max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="envDef", - val=r.get("envDef"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "envDef"]) - - -class ShellCommandRequirement(ProcessRequirement): - """ - Modify the behavior of CommandLineTool to generate a single string - containing a shell command line. Each item in the `arguments` list must - be joined into a string separated by single spaces and quoted to prevent - interpretation by the shell, unless `CommandLineBinding` for that argument - contains `shellQuote: false`. If `shellQuote: false` is specified, the - argument is joined into the command string without quoting, which allows - the use of shell metacharacters such as `|` for pipes. 
- - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ShellCommandRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ShellCommandRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ShellCommandRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ShellCommandRequirement": - raise ValidationException("Not a ShellCommandRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'ShellCommandRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if 
inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ShellCommandRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ResourceRequirement(ProcessRequirement): - """ - Specify basic hardware resource requirements. - - "min" is the minimum amount of a resource that must be reserved to - schedule a job. If "min" cannot be satisfied, the job should not - be run. 
- - "max" is the maximum amount of a resource that the job shall be - allocated. If a node has sufficient resources, multiple jobs may - be scheduled on a single node provided each job's "max" resource - requirements are met. If a job attempts to exceed its resource - allocation, an implementation may deny additional resources, which - may result in job failure. - - If both "min" and "max" are specified, an implementation may - choose to allocate any amount between "min" and "max", with the - actual allocation provided in the `runtime` object. - - If "min" is specified but "max" is not, then "max" == "min" - If "max" is specified by "min" is not, then "min" == "max". - - It is an error if max < min. - - It is an error if the value of any of these fields is negative. - - If neither "min" nor "max" is specified for a resource, use the default values below. - - """ - - def __init__( - self, - coresMin: Optional[Any] = None, - coresMax: Optional[Any] = None, - ramMin: Optional[Any] = None, - ramMax: Optional[Any] = None, - tmpdirMin: Optional[Any] = None, - tmpdirMax: Optional[Any] = None, - outdirMin: Optional[Any] = None, - outdirMax: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ResourceRequirement" - self.coresMin = coresMin - self.coresMax = coresMax - self.ramMin = ramMin - self.ramMax = ramMax - self.tmpdirMin = tmpdirMin - self.tmpdirMax = tmpdirMax - self.outdirMin = outdirMin - self.outdirMax = outdirMax - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ResourceRequirement): - return bool( - self.class_ == other.class_ - and self.coresMin == other.coresMin - and self.coresMax == other.coresMax - and self.ramMin == 
other.ramMin - and self.ramMax == other.ramMax - and self.tmpdirMin == other.tmpdirMin - and self.tmpdirMax == other.tmpdirMax - and self.outdirMin == other.outdirMin - and self.outdirMax == other.outdirMax - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.coresMin, - self.coresMax, - self.ramMin, - self.ramMax, - self.tmpdirMin, - self.tmpdirMax, - self.outdirMin, - self.outdirMax, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ResourceRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ResourceRequirement": - raise ValidationException("Not a ResourceRequirement") - - if "coresMin" in _doc: - try: - coresMin = load_field( - _doc.get("coresMin"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'coresMin' field is not valid because:", - SourceLine(_doc, "coresMin", str), - [e], - ) - ) - else: - coresMin = None - if "coresMax" in _doc: - try: - coresMax = load_field( - _doc.get("coresMax"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'coresMax' field is not valid because:", - SourceLine(_doc, "coresMax", str), - [e], - ) - ) - else: - coresMax = None - if "ramMin" in _doc: - try: - ramMin = load_field( - _doc.get("ramMin"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'ramMin' field is not valid because:", - SourceLine(_doc, "ramMin", str), - [e], - ) - ) - else: - ramMin = None - if "ramMax" in 
_doc: - try: - ramMax = load_field( - _doc.get("ramMax"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'ramMax' field is not valid because:", - SourceLine(_doc, "ramMax", str), - [e], - ) - ) - else: - ramMax = None - if "tmpdirMin" in _doc: - try: - tmpdirMin = load_field( - _doc.get("tmpdirMin"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'tmpdirMin' field is not valid because:", - SourceLine(_doc, "tmpdirMin", str), - [e], - ) - ) - else: - tmpdirMin = None - if "tmpdirMax" in _doc: - try: - tmpdirMax = load_field( - _doc.get("tmpdirMax"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'tmpdirMax' field is not valid because:", - SourceLine(_doc, "tmpdirMax", str), - [e], - ) - ) - else: - tmpdirMax = None - if "outdirMin" in _doc: - try: - outdirMin = load_field( - _doc.get("outdirMin"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outdirMin' field is not valid because:", - SourceLine(_doc, "outdirMin", str), - [e], - ) - ) - else: - outdirMin = None - if "outdirMax" in _doc: - try: - outdirMax = load_field( - _doc.get("outdirMax"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outdirMax' field is not valid because:", - SourceLine(_doc, "outdirMax", str), - [e], - ) - ) - else: - outdirMax = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in 
cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ResourceRequirement'", None, _errors__) - _constructed = cls( - coresMin=coresMin, - coresMax=coresMax, - ramMin=ramMin, - ramMax=ramMax, - tmpdirMin=tmpdirMin, - tmpdirMax=tmpdirMax, - outdirMin=outdirMin, - outdirMax=outdirMax, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ResourceRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.coresMin is not None and "coresMin" not in r: - r["coresMin"] = save( - self.coresMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="coresMin", - val=r.get("coresMin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.coresMax is not None and "coresMax" not in r: - r["coresMax"] = save( - self.coresMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="coresMax", - val=r.get("coresMax"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.ramMin is not None and "ramMin" not in r: - r["ramMin"] = save( - self.ramMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="ramMin", - val=r.get("ramMin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.ramMax is not None and "ramMax" not in 
r: - r["ramMax"] = save( - self.ramMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="ramMax", - val=r.get("ramMax"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.tmpdirMin is not None and "tmpdirMin" not in r: - r["tmpdirMin"] = save( - self.tmpdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="tmpdirMin", - val=r.get("tmpdirMin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.tmpdirMax is not None and "tmpdirMax" not in r: - r["tmpdirMax"] = save( - self.tmpdirMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="tmpdirMax", - val=r.get("tmpdirMax"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outdirMin is not None and "outdirMin" not in r: - r["outdirMin"] = save( - self.outdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outdirMin", - val=r.get("outdirMin"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outdirMax is not None and "outdirMax" not in r: - r["outdirMax"] = save( - self.outdirMax, - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outdirMax", - val=r.get("outdirMax"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "coresMin", - "coresMax", - "ramMin", - "ramMax", - "tmpdirMin", - "tmpdirMax", - "outdirMin", - "outdirMax", - ] - ) - - -class WorkReuse(ProcessRequirement): - """ - For implementations that support reusing output from past work (on - the assumption that same code and same input produce same - results), control whether to enable or disable the reuse behavior - for a particular tool or step (to accommodate situations where that - assumption is incorrect). A reused step is not executed but - instead returns the same output as the original execution. - - If `WorkReuse` is not specified, correct tools should assume it - is enabled by default. 
- - """ - - def __init__( - self, - enableReuse: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "WorkReuse" - self.enableReuse = enableReuse - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkReuse): - return bool( - self.class_ == other.class_ and self.enableReuse == other.enableReuse - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.enableReuse)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkReuse": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "WorkReuse": - raise ValidationException("Not a WorkReuse") - - try: - enableReuse = load_field( - _doc.get("enableReuse"), - union_of_booltype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'enableReuse' field is not valid because:", - SourceLine(_doc, "enableReuse", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkReuse'", None, _errors__) - _constructed = cls( - enableReuse=enableReuse, - extension_fields=extension_fields, - 
loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "WorkReuse" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.enableReuse is not None and "enableReuse" not in r: - r["enableReuse"] = save( - self.enableReuse, - top=False, - base_url=base_url, - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="enableReuse", - val=r.get("enableReuse"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "enableReuse"]) - - -class NetworkAccess(ProcessRequirement): - """ - Indicate whether a process requires outgoing IPv4/IPv6 network - access. Choice of IPv4 or IPv6 is implementation and site - specific, correct tools must support both. - - If `networkAccess` is false or not specified, tools must not - assume network access, except for localhost (the loopback device). - - If `networkAccess` is true, the tool must be able to make outgoing - connections to network resources. Resources may be on a private - subnet or the public Internet. However, implementations and sites - may apply their own security policies to restrict what is - accessible by the tool. - - Enabling network access does not imply a publicly routable IP - address or the ability to accept inbound connections. 
- - """ - - def __init__( - self, - networkAccess: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "NetworkAccess" - self.networkAccess = networkAccess - - def __eq__(self, other: Any) -> bool: - if isinstance(other, NetworkAccess): - return bool( - self.class_ == other.class_ - and self.networkAccess == other.networkAccess - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.networkAccess)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "NetworkAccess": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "NetworkAccess": - raise ValidationException("Not a NetworkAccess") - - try: - networkAccess = load_field( - _doc.get("networkAccess"), - union_of_booltype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'networkAccess' field is not valid because:", - SourceLine(_doc, "networkAccess", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `networkAccess`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'NetworkAccess'", None, _errors__) - _constructed = cls( - networkAccess=networkAccess, - 
extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "NetworkAccess" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.networkAccess is not None and "networkAccess" not in r: - r["networkAccess"] = save( - self.networkAccess, - top=False, - base_url=base_url, - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="networkAccess", - val=r.get("networkAccess"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "networkAccess"]) - - -class InplaceUpdateRequirement(ProcessRequirement): - """ - - If `inplaceUpdate` is true, then an implementation supporting this - feature may permit tools to directly update files with `writable: - true` in InitialWorkDirRequirement. That is, as an optimization, - files may be destructively modified in place as opposed to copied - and updated. - - An implementation must ensure that only one workflow step may - access a writable file at a time. It is an error if a file which - is writable by one workflow step file is accessed (for reading or - writing) by any other workflow step running independently. - However, a file which has been updated in a previous completed - step may be used as input to multiple steps, provided it is - read-only in every step. - - Workflow steps which modify a file must produce the modified file - as output. Downstream steps which further process the file must - use the output of previous steps, and not refer to a common input - (this is necessary for both ordering and correctness). - - Workflow authors should provide this in the `hints` section. The - intent of this feature is that workflows produce the same results - whether or not InplaceUpdateRequirement is supported by the - implementation, and this feature is primarily available as an - optimization for particular environments. 
- - Users and implementers should be aware that workflows that - destructively modify inputs may not be repeatable or reproducible. - In particular, enabling this feature implies that WorkReuse should - not be enabled. - - """ - - def __init__( - self, - inplaceUpdate: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InplaceUpdateRequirement" - self.inplaceUpdate = inplaceUpdate - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InplaceUpdateRequirement): - return bool( - self.class_ == other.class_ - and self.inplaceUpdate == other.inplaceUpdate - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.inplaceUpdate)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "InplaceUpdateRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "InplaceUpdateRequirement": - raise ValidationException("Not a InplaceUpdateRequirement") - - try: - inplaceUpdate = load_field( - _doc.get("inplaceUpdate"), - booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inplaceUpdate' field is not valid because:", - SourceLine(_doc, "inplaceUpdate", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, 
expected one of: `class`, `inplaceUpdate`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'InplaceUpdateRequirement'", None, _errors__ - ) - _constructed = cls( - inplaceUpdate=inplaceUpdate, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "InplaceUpdateRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - 
val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.inplaceUpdate is not None and "inplaceUpdate" not in r: - r["inplaceUpdate"] = save( - self.inplaceUpdate, - top=False, - base_url=base_url, - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inplaceUpdate", - val=r.get("inplaceUpdate"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "inplaceUpdate"]) - - -class ToolTimeLimit(ProcessRequirement): - """ - Set an upper limit on the execution time of a CommandLineTool. - A CommandLineTool whose execution duration exceeds the time - limit may be preemptively terminated and considered failed. - May also be used by batch systems to make scheduling decisions. - The execution duration excludes external operations, such as - staging of files, pulling a docker image etc, and only counts - wall-time for the execution of the command line itself. 
- - """ - - def __init__( - self, - timelimit: Any, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ToolTimeLimit" - self.timelimit = timelimit - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ToolTimeLimit): - return bool( - self.class_ == other.class_ and self.timelimit == other.timelimit - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.timelimit)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ToolTimeLimit": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ToolTimeLimit": - raise ValidationException("Not a ToolTimeLimit") - - try: - timelimit = load_field( - _doc.get("timelimit"), - union_of_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'timelimit' field is not valid because:", - SourceLine(_doc, "timelimit", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ToolTimeLimit'", None, _errors__) - _constructed = cls( - timelimit=timelimit, - extension_fields=extension_fields, - 
loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ToolTimeLimit" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.timelimit is not None and "timelimit" not in r: - r["timelimit"] = save( - self.timelimit, - top=False, - base_url=base_url, - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="timelimit", - val=r.get("timelimit"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "timelimit"]) - - -class ExpressionToolOutputParameter(OutputParameter): - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionToolOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - 
loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ExpressionToolOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - 
union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'ExpressionToolOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return 
_constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - 
base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] - ) - - -class WorkflowInputParameter(InputParameter): - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: 
Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default - self.type = type - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default - and self.type == other.type - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.loadContents, - self.loadListing, - self.default, - self.type, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowInputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], 
- ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 
'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_InputBindingLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( 
- "the 'inputBinding' field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - ) - ) - else: - inputBinding = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'WorkflowInputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, - type=type, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = 
{} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, 
- shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - 
base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputBinding is not None and "inputBinding" not in r: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputBinding", - val=r.get("inputBinding"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "loadContents", - "loadListing", - "default", - "type", - "inputBinding", - ] - ) - - -class ExpressionTool(Process): - """ - An ExpressionTool is a type of Process object that can be run by itself - or as a Workflow step. It executes a pure Javascript expression that has - access to the same input parameters as a workflow. It is meant to be used - sparingly as a way to isolate complex Javascript expressions that need to - operate on input data and produce some result; perhaps just a - rearrangement of the inputs. No Docker software container is required - or allowed. 
- - """ - - def __init__( - self, - inputs: Any, - outputs: Any, - expression: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.intent = intent - self.class_ = "ExpressionTool" - self.expression = expression - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionTool): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.intent == other.intent - and self.class_ == other.class_ - and self.expression == other.expression - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.intent, - self.class_, - self.expression, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ExpressionTool": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ExpressionTool": - raise 
ValidationException("Not a ExpressionTool") - - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_WorkflowInputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputs' field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - ) - ) - try: - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_ExpressionToolOutputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputs' field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - 
"the 'cwlVersion' field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - ) - ) - else: - cwlVersion = None - if "intent" in _doc: - try: - intent = load_field( - _doc.get("intent"), - uri_union_of_None_type_or_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'intent' field is not valid because:", - SourceLine(_doc, "intent", str), - [e], - ) - ) - else: - intent = None - try: - expression = load_field( - _doc.get("expression"), - ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'expression' field is not valid because:", - SourceLine(_doc, "expression", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `expression`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'ExpressionTool'", None, _errors__) - _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - intent=intent, - expression=expression, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = 
CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ExpressionTool" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - 
old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.intent is not None and "intent" not in r: - u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) - r["intent"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="intent", - val=r.get("intent"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.expression is not None and "expression" not in r: - r["expression"] = save( - self.expression, - top=False, - base_url=str(self.id), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="expression", - val=r.get("expression"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "intent", - "class", - "expression", - ] - ) - - -class WorkflowOutputParameter(OutputParameter): - """ - Describe an output parameter of a workflow. The parameter must be - connected to one or more parameters defined in the workflow that - will provide the value of the output parameter. It is legal to - connect a WorkflowInputParameter to a WorkflowOutputParameter. - - See [WorkflowStepInput](#WorkflowStepInput) for discussion of - `linkMerge` and `pickValue`. 
- - """ - - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - outputSource: Optional[Any] = None, - linkMerge: Optional[Any] = None, - pickValue: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.outputSource = outputSource - self.linkMerge = linkMerge - self.pickValue = pickValue - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.outputSource == other.outputSource - and self.linkMerge == other.linkMerge - and self.pickValue == other.pickValue - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.outputSource, - self.linkMerge, - self.pickValue, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = 
load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if 
"format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "outputSource" in _doc: - try: - outputSource = load_field( - _doc.get("outputSource"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputSource' field is not valid because:", - SourceLine(_doc, "outputSource", str), - [e], - ) - ) - else: - outputSource = None - if "linkMerge" in _doc: - try: - linkMerge = load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'linkMerge' field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [e], - ) - ) - else: - linkMerge = None - if "pickValue" in _doc: - try: - pickValue = load_field( - _doc.get("pickValue"), - union_of_None_type_or_PickValueMethodLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'pickValue' field is not valid because:", - SourceLine(_doc, "pickValue", str), - [e], - ) - ) - else: - pickValue = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - 
SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `outputSource`, `linkMerge`, `pickValue`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'WorkflowOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - outputSource=outputSource, - linkMerge=linkMerge, - pickValue=pickValue, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] 
- else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - 
self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputSource is not None and "outputSource" not in r: - u = save_relative_uri( - self.outputSource, str(self.id), False, 1, relative_uris - ) - r["outputSource"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - 
line_numbers=line_numbers, - key="outputSource", - val=r.get("outputSource"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.linkMerge is not None and "linkMerge" not in r: - r["linkMerge"] = save( - self.linkMerge, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="linkMerge", - val=r.get("linkMerge"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.pickValue is not None and "pickValue" not in r: - r["pickValue"] = save( - self.pickValue, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="pickValue", - val=r.get("pickValue"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "outputSource", - "linkMerge", - "pickValue", - "type", 
- ] - ) - - -class Sink(Saveable): - pass - - -class WorkflowStepInput(Identified, Sink, LoadContents, Labeled): - """ - The input of a workflow step connects an upstream parameter (from the - workflow inputs, or the outputs of other workflows steps) with the input - parameters of the process specified by the `run` field. Only input parameters - declared by the target process will be passed through at runtime to the process - though additional parameters may be specified (for use within `valueFrom` - expressions for instance) - unconnected or unused parameters do not represent an - error condition. - - # Input object - - A WorkflowStepInput object must contain an `id` field in the form - `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash - `/` the field name consists of the characters following the final slash - (the prefix portion may contain one or more slashes to indicate scope). - This defines a field of the workflow step input object with the value of - the `source` parameter(s). - - # Merging multiple inbound data links - - To merge multiple inbound data links, - [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - If the sink parameter is an array, or named in a [workflow - scatter](#WorkflowStep) operation, there may be multiple inbound - data links listed in the `source` field. The values from the - input links are merged depending on the method specified in the - `linkMerge` field. If both `linkMerge` and `pickValue` are null - or not specified, and there is more than one element in the - `source` array, the default method is "merge_nested". - - If both `linkMerge` and `pickValue` are null or not specified, and - there is only a single element in the `source`, then the input - parameter takes the scalar value from the single input link (it is - *not* wrapped in a single-list). 
- - * **merge_nested** - - The input must be an array consisting of exactly one entry for each - input link. If "merge_nested" is specified with a single link, the value - from the link must be wrapped in a single-item list. - - * **merge_flattened** - - 1. The source and sink parameters must be compatible types, or the source - type must be compatible with single element from the "items" type of - the destination array parameter. - 2. Source parameters which are arrays are concatenated. - Source parameters which are single element types are appended as - single elements. - - # Picking non-null values among inbound data links - - If present, `pickValue` specifies how to pick non-null values among inbound data links. - - `pickValue` is evaluated - 1. Once all source values from upstream step or parameters are available. - 2. After `linkMerge`. - 3. Before `scatter` or `valueFrom`. - - This is specifically intended to be useful in combination with - [conditional execution](#WorkflowStep), where several upstream - steps may be connected to a single input (`source` is a list), and - skipped steps produce null values. - - Static type checkers should check for type consistency after inferring what the type - will be after `pickValue` is applied, just as they do currently for `linkMerge`. - - * **first_non_null** - - For the first level of a list input, pick the first non-null element. The result is a scalar. - It is an error if there is no non-null element. Examples: - * `[null, x, null, y] -> x` - * `[null, [null], null, y] -> [null]` - * `[null, null, null] -> Runtime Error` - - *Intended use case*: If-else pattern where the - value comes either from a conditional step or from a default or - fallback value. The conditional step(s) should be placed first in - the list. - - * **the_only_non_null** - - For the first level of a list input, pick the single non-null element. The result is a scalar. - It is an error if there is more than one non-null element. 
Examples: - - * `[null, x, null] -> x` - * `[null, x, null, y] -> Runtime Error` - * `[null, [null], null] -> [null]` - * `[null, null, null] -> Runtime Error` - - *Intended use case*: Switch type patterns where developer considers - more than one active code path as a workflow error - (possibly indicating an error in writing `when` condition expressions). - - * **all_non_null** - - For the first level of a list input, pick all non-null values. - The result is a list, which may be empty. Examples: - - * `[null, x, null] -> [x]` - * `[x, null, y] -> [x, y]` - * `[null, [x], [null]] -> [[x], [null]]` - * `[null, null, null] -> []` - - *Intended use case*: It is valid to have more than one source, but - sources are conditional, so null sources (from skipped steps) - should be filtered out. - - """ - - def __init__( - self, - id: Optional[Any] = None, - source: Optional[Any] = None, - linkMerge: Optional[Any] = None, - pickValue: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - label: Optional[Any] = None, - default: Optional[Any] = None, - valueFrom: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.source = source - self.linkMerge = linkMerge - self.pickValue = pickValue - self.loadContents = loadContents - self.loadListing = loadListing - self.label = label - self.default = default - self.valueFrom = valueFrom - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepInput): - return bool( - self.id == other.id - and self.source == other.source - and self.linkMerge == other.linkMerge - and self.pickValue == other.pickValue - and self.loadContents == other.loadContents 
- and self.loadListing == other.loadListing - and self.label == other.label - and self.default == other.default - and self.valueFrom == other.valueFrom - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.source, - self.linkMerge, - self.pickValue, - self.loadContents, - self.loadListing, - self.label, - self.default, - self.valueFrom, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStepInput": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "source" in _doc: - try: - source = load_field( - _doc.get("source"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'source' field is not valid because:", - SourceLine(_doc, "source", str), - [e], - ) - ) - else: - source = None - if "linkMerge" in _doc: - try: - linkMerge = load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'linkMerge' field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [e], - ) - ) - else: - linkMerge = None - if "pickValue" in _doc: - try: - pickValue = 
load_field( - _doc.get("pickValue"), - union_of_None_type_or_PickValueMethodLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'pickValue' field is not valid because:", - SourceLine(_doc, "pickValue", str), - [e], - ) - ) - else: - pickValue = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - if "valueFrom" in _doc: - try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: 
- _errors__.append( - ValidationException( - "the 'valueFrom' field is not valid because:", - SourceLine(_doc, "valueFrom", str), - [e], - ) - ) - else: - valueFrom = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `pickValue`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__) - _constructed = cls( - id=id, - source=source, - linkMerge=linkMerge, - pickValue=pickValue, - loadContents=loadContents, - loadListing=loadListing, - label=label, - default=default, - valueFrom=valueFrom, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in 
self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.source is not None and "source" not in r: - u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) - r["source"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="source", - val=r.get("source"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.linkMerge is not None and "linkMerge" not in r: - r["linkMerge"] = save( 
- self.linkMerge, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="linkMerge", - val=r.get("linkMerge"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.pickValue is not None and "pickValue" not in r: - r["pickValue"] = save( - self.pickValue, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="pickValue", - val=r.get("pickValue"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - 
base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.valueFrom is not None and "valueFrom" not in r: - r["valueFrom"] = save( - self.valueFrom, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="valueFrom", - val=r.get("valueFrom"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "source", - "linkMerge", - "pickValue", - "loadContents", - "loadListing", - "label", - "default", - "valueFrom", - ] - ) - - -class WorkflowStepOutput(Identified): - """ - Associate an output parameter of the underlying process with a workflow - parameter. 
The workflow parameter (given in the `id` field) be may be used - as a `source` to connect with input parameters of other workflow steps, or - with an output parameter of the process. - - A unique identifier for this workflow output parameter. This is - the identifier to use in the `source` field of `WorkflowStepInput` - to connect the output value to downstream parameters. - - """ - - def __init__( - self, - id: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepOutput): - return bool(self.id == other.id) - return False - - def __hash__(self) -> int: - return hash((self.id)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStepOutput": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - 
extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__) - _constructed = cls( - id=id, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - 
keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["id"]) - - -class WorkflowStep(Identified, Labeled, Documented): - """ - A workflow step is an executable element of a workflow. It specifies the - underlying process implementation (such as `CommandLineTool` or another - `Workflow`) in the `run` field and connects the input and output parameters - of the underlying process to workflow parameters. - - # Scatter/gather - - To use scatter/gather, - [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - A "scatter" operation specifies that the associated workflow step or - subworkflow should execute separately over a list of input elements. Each - job making up a scatter operation is independent and may be executed - concurrently. - - The `scatter` field specifies one or more input parameters which will be - scattered. 
An input parameter may be listed more than once. The declared - type of each input parameter implicitly becomes an array of items of the - input parameter type. If a parameter is listed more than once, it becomes - a nested array. As a result, upstream parameters which are connected to - scattered parameters must be arrays. - - All output parameter types are also implicitly wrapped in arrays. Each job - in the scatter results in an entry in the output array. - - If any scattered parameter runtime value is an empty array, all outputs are - set to empty arrays and no work is done for the step, according to - applicable scattering rules. - - If `scatter` declares more than one input parameter, `scatterMethod` - describes how to decompose the input into a discrete set of jobs. - - * **dotproduct** specifies that each of the input arrays are aligned and one - element taken from each array to construct each job. It is an error - if all input arrays are not the same length. - - * **nested_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output must be nested arrays for each level of scattering, in the - order that the input arrays are listed in the `scatter` field. - - * **flat_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output arrays must be flattened to a single level, but otherwise listed in the - order that the input arrays are listed in the `scatter` field. - - # Conditional execution (Optional) - - Conditional execution makes execution of a step conditional on an - expression. A step that is not executed is "skipped". A skipped - step produces `null` for all output parameters. - - The condition is evaluated after `scatter`, using the input object - of each individual scatter job. This means over a set of scatter - jobs, some may be executed and some may be skipped. 
When the - results are gathered, skipped steps must be `null` in the output - arrays. - - The `when` field controls conditional execution. This is an - expression that must be evaluated with `inputs` bound to the step - input object (or individual scatter job), and returns a boolean - value. It is an error if this expression returns a value other - than `true` or `false`. - - Conditionals in CWL are an optional feature and are not required - to be implemented by all consumers of CWL documents. An - implementation that does not support conditionals must return a - fatal error when attempting to execute a workflow that uses - conditional constructs the implementation does not support. - - # Subworkflows - - To specify a nested workflow as part of a workflow step, - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be - specified in the workflow or workflow step requirements. - - It is a fatal error if a workflow directly or indirectly invokes itself as - a subworkflow (recursive workflows are not allowed). 
- - """ - - def __init__( - self, - in_: Any, - out: Any, - run: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - when: Optional[Any] = None, - scatter: Optional[Any] = None, - scatterMethod: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.label = label - self.doc = doc - self.in_ = in_ - self.out = out - self.requirements = requirements - self.hints = hints - self.run = run - self.when = when - self.scatter = scatter - self.scatterMethod = scatterMethod - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStep): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.in_ == other.in_ - and self.out == other.out - and self.requirements == other.requirements - and self.hints == other.hints - and self.run == other.run - and self.when == other.when - and self.scatter == other.scatter - and self.scatterMethod == other.scatterMethod - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.in_, - self.out, - self.requirements, - self.hints, - self.run, - self.when, - self.scatter, - self.scatterMethod, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "WorkflowStep": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - 
uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - in_ = load_field( - _doc.get("in"), - idmap_in__array_of_WorkflowStepInputLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'in' field is not valid because:", - SourceLine(_doc, "in", str), - [e], - ) - ) - try: - out = load_field( - _doc.get("out"), - uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'out' field is not valid because:", - SourceLine(_doc, "out", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) - try: - run = load_field( - _doc.get("run"), - uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None, - subscope_baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'run' field is not valid because:", - SourceLine(_doc, "run", str), - [e], - ) - ) - if "when" in _doc: - try: - when = load_field( - _doc.get("when"), - union_of_None_type_or_ExpressionLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'when' field is not valid because:", - SourceLine(_doc, "when", str), - [e], - ) - ) - else: - when = None - if "scatter" 
in _doc: - try: - scatter = load_field( - _doc.get("scatter"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'scatter' field is not valid because:", - SourceLine(_doc, "scatter", str), - [e], - ) - ) - else: - scatter = None - if "scatterMethod" in _doc: - try: - scatterMethod = load_field( - _doc.get("scatterMethod"), - uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'scatterMethod' field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [e], - ) - ) - else: - scatterMethod = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`, `scatter`, `scatterMethod`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'WorkflowStep'", None, _errors__) - _constructed = cls( - id=id, - label=label, - doc=doc, - in_=in_, - out=out, - requirements=requirements, - hints=hints, - run=run, - when=when, - scatter=scatter, - scatterMethod=scatterMethod, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - 
- doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - 
cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.in_ is not None and "in" not in r: - r["in"] = save( - self.in_, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="in", - val=r.get("in"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.out is not None and "out" not in r: - u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) - r["out"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="out", - val=r.get("out"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.run is not None and "run" not in r: - u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) - r["run"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="run", - val=r.get("run"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.when is not None and "when" not in r: - r["when"] = save( - self.when, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="when", - val=r.get("when"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.scatter is not None and "scatter" not in r: - u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) - r["scatter"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="scatter", - val=r.get("scatter"), - cols=cols, - min_col=min_col, - max_len=max_len, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.scatterMethod is not None and "scatterMethod" not in r: - u = save_relative_uri( - self.scatterMethod, str(self.id), False, None, relative_uris - ) - r["scatterMethod"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="scatterMethod", - val=r.get("scatterMethod"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "in", - "out", - "requirements", - "hints", - "run", - "when", - "scatter", - "scatterMethod", - ] - ) - - -class Workflow(Process): - """ - A workflow describes a set of **steps** and the **dependencies** between - those steps. When a step produces output that will be consumed by a - second step, the first step is a dependency of the second step. - - When there is a dependency, the workflow engine must execute the preceding - step and wait for it to successfully produce output before executing the - dependent step. If two steps are defined in the workflow graph that - are not directly or indirectly dependent, these steps are **independent**, - and may execute in any order or execute concurrently. A workflow is - complete when all steps have been executed. - - Dependencies between parameters are expressed using the `source` - field on [workflow step input parameters](#WorkflowStepInput) and - `outputSource` field on [workflow output - parameters](#WorkflowOutputParameter). - - The `source` field on each workflow step input parameter expresses - the data links that contribute to the value of the step input - parameter (the "sink"). 
A workflow step can only begin execution - when every data link connected to a step has been fulfilled. - - The `outputSource` field on each workflow step input parameter - expresses the data links that contribute to the value of the - workflow output parameter (the "sink"). Workflow execution cannot - complete successfully until every data link connected to an output - parameter has been fulfilled. - - ## Workflow success and failure - - A completed step must result in one of `success`, `temporaryFailure` or - `permanentFailure` states. An implementation may choose to retry a step - execution which resulted in `temporaryFailure`. An implementation may - choose to either continue running other steps of a workflow, or terminate - immediately upon `permanentFailure`. - - * If any step of a workflow execution results in `permanentFailure`, then - the workflow status is `permanentFailure`. - - * If one or more steps result in `temporaryFailure` and all other steps - complete `success` or are not executed, then the workflow status is - `temporaryFailure`. - - * If all workflow steps are executed and complete with `success`, then the - workflow status is `success`. - - # Extensions - - [ScatterFeatureRequirement](#ScatterFeatureRequirement) and - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are - available as standard [extensions](#Extensions_and_Metadata) to core - workflow semantics. 
- - """ - - def __init__( - self, - inputs: Any, - outputs: Any, - steps: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.intent = intent - self.class_ = "Workflow" - self.steps = steps - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Workflow): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.intent == other.intent - and self.class_ == other.class_ - and self.steps == other.steps - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.intent, - self.class_, - self.steps, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Workflow": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "Workflow": - raise ValidationException("Not a Workflow") - - if "id" in 
_doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_WorkflowInputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputs' field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - ) - ) - try: - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_WorkflowOutputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputs' field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - 
"the 'cwlVersion' field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - ) - ) - else: - cwlVersion = None - if "intent" in _doc: - try: - intent = load_field( - _doc.get("intent"), - uri_union_of_None_type_or_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'intent' field is not valid because:", - SourceLine(_doc, "intent", str), - [e], - ) - ) - else: - intent = None - try: - steps = load_field( - _doc.get("steps"), - idmap_steps_union_of_array_of_WorkflowStepLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'steps' field is not valid because:", - SourceLine(_doc, "steps", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `steps`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Workflow'", None, _errors__) - _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - intent=intent, - steps=steps, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - 
keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "Workflow" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - 
line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.intent is not None and "intent" not in r: - u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) - r["intent"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="intent", - val=r.get("intent"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.steps is not None and "steps" not in r: - r["steps"] = save( - self.steps, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - 
shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="steps", - val=r.get("steps"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "intent", - "class", - "steps", - ] - ) - - -class SubworkflowFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support nested workflows in - the `run` field of [WorkflowStep](#WorkflowStep). - - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SubworkflowFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SubworkflowFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "SubworkflowFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "SubworkflowFeatureRequirement": - raise ValidationException("Not a SubworkflowFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k 
not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'SubworkflowFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "SubworkflowFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - 
len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ScatterFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support the `scatter` and - `scatterMethod` fields of [WorkflowStep](#WorkflowStep). - - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ScatterFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ScatterFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "ScatterFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "ScatterFeatureRequirement": - raise ValidationException("Not a ScatterFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, 
vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'ScatterFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "ScatterFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = 
add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class MultipleInputFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support multiple inbound data links - listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). - - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "MultipleInputFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, MultipleInputFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "MultipleInputFeatureRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "MultipleInputFeatureRequirement": - raise ValidationException("Not a MultipleInputFeatureRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - 
else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'MultipleInputFeatureRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "MultipleInputFeatureRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - 
line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class StepInputExpressionRequirement(ProcessRequirement): - """ - Indicate that the workflow platform must support the `valueFrom` field - of [WorkflowStepInput](#WorkflowStepInput). - - """ - - def __init__( - self, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "StepInputExpressionRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, StepInputExpressionRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "StepInputExpressionRequirement": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "StepInputExpressionRequirement": - raise ValidationException("Not a StepInputExpressionRequirement") - - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected 
one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'StepInputExpressionRequirement'", None, _errors__ - ) - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "StepInputExpressionRequirement" - - if doc: - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - 
max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class OperationInputParameter(InputParameter): - """ - Describe an input parameter of an operation. - - """ - - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OperationInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - 
self.doc, - self.id, - self.format, - self.loadContents, - self.loadListing, - self.default, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "OperationInputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not 
valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadContents' field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - ) - ) - else: - loadContents = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'loadListing' field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - ) - ) - else: - loadListing = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'default' field is not valid because:", - SourceLine(_doc, "default", str), - [e], - ) - ) - else: - default = None - try: - type = 
load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'OperationInputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - 
keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), 
- relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - 
new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadContents is not None and "loadContents" not in r: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadContents", - val=r.get("loadContents"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.loadListing is not None and "loadListing" not in r: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="loadListing", - val=r.get("loadListing"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.default is not None and "default" not in r: - r["default"] = save( - self.default, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="default", - val=r.get("default"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), 
- cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "loadContents", - "loadListing", - "default", - "type", - ] - ) - - -class OperationOutputParameter(OutputParameter): - """ - Describe an output parameter of an operation. - - """ - - def __init__( - self, - type: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - id: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id - self.format = format - self.type = type - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OperationOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type == other.type - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.type, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, 
- docRoot: Optional[str] = None, - ) -> "OperationOutputParameter": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'secondaryFiles' field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - ) - ) - else: - secondaryFiles = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'streamable' field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - ) - ) - else: - streamable = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, 
- baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'format' field is not valid because:", - SourceLine(_doc, "format", str), - [e], - ) - ) - else: - format = None - try: - type = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'type' field is not valid because:", - SourceLine(_doc, "type", str), - [e], - ) - ) - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException( - "Trying 'OperationOutputParameter'", None, _errors__ - ) - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - type=type, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - 
base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - 
) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.secondaryFiles is not None and "secondaryFiles" not in r: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="secondaryFiles", - val=r.get("secondaryFiles"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.streamable is not None and "streamable" not in r: - r["streamable"] = save( - self.streamable, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="streamable", - val=r.get("streamable"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - 
inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.format is not None and "format" not in r: - u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) - r["format"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="format", - val=r.get("format"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.type is not None and "type" not in r: - r["type"] = save( - self.type, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="type", - val=r.get("type"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] - ) - - -class Operation(Process): - """ - This record describes an abstract operation. It is a potential - step of a workflow that has not yet been bound to a concrete - implementation. It specifies an input and output signature, but - does not provide enough information to be executed. An - implementation (or other tooling) may provide a means of binding - an Operation to a concrete process (such as Workflow, - CommandLineTool, or ExpressionTool) with a compatible signature. 
- - """ - - def __init__( - self, - inputs: Any, - outputs: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - extension_fields: Optional[Dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.intent = intent - self.class_ = "Operation" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Operation): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.intent == other.intent - and self.class_ == other.class_ - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.intent, - self.class_, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Operation": - _doc = copy.copy(doc) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - - if _doc.get("class") != "Operation": - raise ValidationException("Not a Operation") - - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - 
uri_union_of_None_type_or_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'id' field is not valid because:", - SourceLine(_doc, "id", str), - [e], - ) - ) - else: - id = None - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = id - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'label' field is not valid because:", - SourceLine(_doc, "label", str), - [e], - ) - ) - else: - label = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'doc' field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - ) - ) - else: - doc = None - try: - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_OperationInputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'inputs' field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - ) - ) - try: - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_OperationOutputParameterLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'outputs' field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - ) - ) - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'requirements' field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - ) - ) - else: - requirements = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'hints' field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - ) - ) - else: - hints = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - 
"the 'cwlVersion' field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - ) - ) - else: - cwlVersion = None - if "intent" in _doc: - try: - intent = load_field( - _doc.get("intent"), - uri_union_of_None_type_or_array_of_strtype_True_False_None, - baseuri, - loadingOptions, - ) - except ValidationException as e: - _errors__.append( - ValidationException( - "the 'intent' field is not valid because:", - SourceLine(_doc, "intent", str), - [e], - ) - ) - else: - intent = None - extension_fields: Dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - break - - if _errors__: - raise ValidationException("Trying 'Operation'", None, _errors__) - _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - intent=intent, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[id] = (_constructed, loadingOptions) - return _constructed - - def save( - self, - top: bool = False, - base_url: str = "", - relative_uris: bool = True, - keys: Optional[List[Any]] = None, - inserted_line_info: Optional[Dict[int, int]] = None, - shift: int = 0 - ) -> CommentedMap: - if keys is None: - keys = [] - r = CommentedMap() - - keys = copy.copy(keys) - - doc = iterate_through_doc(keys) - - if inserted_line_info is None: - inserted_line_info = {} - - if doc: - if self.id: - temp_id = self.id - if len(temp_id.split('#')) > 1: - temp_id = self.id.split("#")[1] - if temp_id in doc: - keys.append(temp_id) - temp_doc = doc.get(temp_id) - if 
isinstance(temp_doc, CommentedMap): - doc = temp_doc - - if doc is not None: - r._yaml_set_line_col(doc.lc.line, doc.lc.col) - line_numbers = get_line_numbers(doc) - max_len = get_max_line_num(doc) - min_col = get_min_col(line_numbers) - cols: Dict[int, int] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - - r["class"] = "Operation" - - if doc: - base_url_to_save = base_url - if self.id: - base_url_to_save = self.id - for key in doc.lc.data.keys(): - if isinstance(key, str): - if hasattr(self, key): - if getattr(self, key) is not None: - if key != 'class': - line = doc.lc.data[key][0] + shift - if inserted_line_info: - while line in inserted_line_info: - line += 1 - shift += 1 - saved_val = save( - getattr(self, key), - top=False, - base_url=base_url_to_save, - relative_uris=relative_uris, - keys=keys + [key], - inserted_line_info=inserted_line_info, - shift=shift - ) - - # If the returned value is a list of size 1, just save the value in the list - if type(saved_val) == list: - if ( - len(saved_val) == 1 - ): - saved_val = saved_val[0] - - r[key] = saved_val - - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key=key, - val=r.get(key), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift - ) - if self.id is not None and "id" not in r: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="id", - val=r.get("id"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.label is not None and "label" not in r: - r["label"] = save( - self.label, - top=False, - base_url=str(self.id), - 
relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="label", - val=r.get("label"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.doc is not None and "doc" not in r: - r["doc"] = save( - self.doc, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="doc", - val=r.get("doc"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.inputs is not None and "inputs" not in r: - r["inputs"] = save( - self.inputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="inputs", - val=r.get("inputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.outputs is not None and "outputs" not in r: - r["outputs"] = save( - self.outputs, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="outputs", - val=r.get("outputs"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.requirements is not None and "requirements" not in r: - r["requirements"] = save( - self.requirements, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( 
- old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="requirements", - val=r.get("requirements"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.hints is not None and "hints" not in r: - r["hints"] = save( - self.hints, - top=False, - base_url=str(self.id), - relative_uris=relative_uris, - inserted_line_info=inserted_line_info, - shift=shift, - ) - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="hints", - val=r.get("hints"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.cwlVersion is not None and "cwlVersion" not in r: - u = save_relative_uri( - self.cwlVersion, str(self.id), False, None, relative_uris - ) - r["cwlVersion"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="cwlVersion", - val=r.get("cwlVersion"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - if self.intent is not None and "intent" not in r: - u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) - r["intent"] = u - max_len, inserted_line_info = add_kv( - old_doc=doc, - new_doc=r, - line_numbers=line_numbers, - key="intent", - val=r.get("intent"), - cols=cols, - min_col=min_col, - max_len=max_len, - inserted_line_info=inserted_line_info, - shift=shift, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "intent", - "class", - ] - ) - - -_vocab = { - "Any": "https://w3id.org/cwl/salad#Any", - "ArraySchema": 
"https://w3id.org/cwl/salad#ArraySchema", - "CWLType": "https://w3id.org/cwl/cwl#CWLType", - "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", - "CommandInputArraySchema": "https://w3id.org/cwl/cwl#CommandInputArraySchema", - "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", - "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter", - "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", - "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", - "CommandInputSchema": "https://w3id.org/cwl/cwl#CommandInputSchema", - "CommandLineBindable": "https://w3id.org/cwl/cwl#CommandLineBindable", - "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", - "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", - "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", - "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", - "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema", - "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", - "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", - "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", - "Directory": "https://w3id.org/cwl/cwl#Directory", - "Dirent": "https://w3id.org/cwl/cwl#Dirent", - "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", - "Documented": "https://w3id.org/cwl/salad#Documented", - "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", - "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", - "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", - "Expression": "https://w3id.org/cwl/cwl#Expression", - "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", - "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", - "ExpressionToolOutputParameter": 
"https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", - "FieldBase": "https://w3id.org/cwl/cwl#FieldBase", - "File": "https://w3id.org/cwl/cwl#File", - "IOSchema": "https://w3id.org/cwl/cwl#IOSchema", - "Identified": "https://w3id.org/cwl/cwl#Identified", - "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", - "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", - "InplaceUpdateRequirement": "https://w3id.org/cwl/cwl#InplaceUpdateRequirement", - "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", - "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", - "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", - "InputFormat": "https://w3id.org/cwl/cwl#InputFormat", - "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", - "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField", - "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", - "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", - "Labeled": "https://w3id.org/cwl/cwl#Labeled", - "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", - "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", - "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", - "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", - "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", - "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", - "Operation": "https://w3id.org/cwl/cwl#Operation", - "OperationInputParameter": "https://w3id.org/cwl/cwl#OperationInputParameter", - "OperationOutputParameter": "https://w3id.org/cwl/cwl#OperationOutputParameter", - "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", - "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", - "OutputFormat": "https://w3id.org/cwl/cwl#OutputFormat", - "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", - "OutputRecordField": 
"https://w3id.org/cwl/cwl#OutputRecordField", - "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", - "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", - "Parameter": "https://w3id.org/cwl/cwl#Parameter", - "PickValueMethod": "https://w3id.org/cwl/cwl#PickValueMethod", - "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", - "Process": "https://w3id.org/cwl/cwl#Process", - "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", - "RecordField": "https://w3id.org/cwl/salad#RecordField", - "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", - "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", - "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", - "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", - "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", - "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", - "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", - "Sink": "https://w3id.org/cwl/cwl#Sink", - "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", - "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", - "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", - "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", - "ToolTimeLimit": "https://w3id.org/cwl/cwl#ToolTimeLimit", - "WorkReuse": "https://w3id.org/cwl/cwl#WorkReuse", - "Workflow": "https://w3id.org/cwl/cwl#Workflow", - "WorkflowInputParameter": "https://w3id.org/cwl/cwl#WorkflowInputParameter", - "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", - "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", - "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", - "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", - "all_non_null": 
"https://w3id.org/cwl/cwl#PickValueMethod/all_non_null", - "array": "https://w3id.org/cwl/salad#array", - "boolean": "http://www.w3.org/2001/XMLSchema#boolean", - "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", - "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", - "double": "http://www.w3.org/2001/XMLSchema#double", - "draft-2": "https://w3id.org/cwl/cwl#draft-2", - "draft-3": "https://w3id.org/cwl/cwl#draft-3", - "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", - "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", - "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", - "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", - "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", - "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", - "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", - "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", - "enum": "https://w3id.org/cwl/salad#enum", - "first_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null", - "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", - "float": "http://www.w3.org/2001/XMLSchema#float", - "int": "http://www.w3.org/2001/XMLSchema#int", - "long": "http://www.w3.org/2001/XMLSchema#long", - "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", - "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", - "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", - "no_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing", - "null": "https://w3id.org/cwl/salad#null", - "record": "https://w3id.org/cwl/salad#record", - "shallow_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing", - "stderr": "https://w3id.org/cwl/cwl#stderr", - "stdin": "https://w3id.org/cwl/cwl#stdin", - "stdout": "https://w3id.org/cwl/cwl#stdout", - "string": "http://www.w3.org/2001/XMLSchema#string", - "the_only_non_null": 
"https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null", - "v1.0": "https://w3id.org/cwl/cwl#v1.0", - "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", - "v1.1": "https://w3id.org/cwl/cwl#v1.1", - "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", - "v1.2": "https://w3id.org/cwl/cwl#v1.2", - "v1.2.0-dev1": "https://w3id.org/cwl/cwl#v1.2.0-dev1", - "v1.2.0-dev2": "https://w3id.org/cwl/cwl#v1.2.0-dev2", - "v1.2.0-dev3": "https://w3id.org/cwl/cwl#v1.2.0-dev3", - "v1.2.0-dev4": "https://w3id.org/cwl/cwl#v1.2.0-dev4", - "v1.2.0-dev5": "https://w3id.org/cwl/cwl#v1.2.0-dev5", -} -_rvocab = { - "https://w3id.org/cwl/salad#Any": "Any", - "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", - "https://w3id.org/cwl/cwl#CWLType": "CWLType", - "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", - "https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", - "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", - "https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", - "https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", - "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", - "https://w3id.org/cwl/cwl#CommandInputSchema": "CommandInputSchema", - "https://w3id.org/cwl/cwl#CommandLineBindable": "CommandLineBindable", - "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", - "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", - "https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", - "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", - "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", - "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", - "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", - "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", - 
"https://w3id.org/cwl/cwl#Directory": "Directory", - "https://w3id.org/cwl/cwl#Dirent": "Dirent", - "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", - "https://w3id.org/cwl/salad#Documented": "Documented", - "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", - "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", - "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", - "https://w3id.org/cwl/cwl#Expression": "Expression", - "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", - "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", - "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", - "https://w3id.org/cwl/cwl#FieldBase": "FieldBase", - "https://w3id.org/cwl/cwl#File": "File", - "https://w3id.org/cwl/cwl#IOSchema": "IOSchema", - "https://w3id.org/cwl/cwl#Identified": "Identified", - "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", - "https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", - "https://w3id.org/cwl/cwl#InplaceUpdateRequirement": "InplaceUpdateRequirement", - "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", - "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", - "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", - "https://w3id.org/cwl/cwl#InputFormat": "InputFormat", - "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", - "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", - "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", - "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", - "https://w3id.org/cwl/cwl#Labeled": "Labeled", - "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", - "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", - "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", - "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", - 
"https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", - "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", - "https://w3id.org/cwl/cwl#Operation": "Operation", - "https://w3id.org/cwl/cwl#OperationInputParameter": "OperationInputParameter", - "https://w3id.org/cwl/cwl#OperationOutputParameter": "OperationOutputParameter", - "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", - "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", - "https://w3id.org/cwl/cwl#OutputFormat": "OutputFormat", - "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", - "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", - "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", - "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", - "https://w3id.org/cwl/cwl#Parameter": "Parameter", - "https://w3id.org/cwl/cwl#PickValueMethod": "PickValueMethod", - "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", - "https://w3id.org/cwl/cwl#Process": "Process", - "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", - "https://w3id.org/cwl/salad#RecordField": "RecordField", - "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", - "https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", - "https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", - "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", - "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", - "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", - "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", - "https://w3id.org/cwl/cwl#Sink": "Sink", - "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", - "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", - "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", - 
"https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", - "https://w3id.org/cwl/cwl#ToolTimeLimit": "ToolTimeLimit", - "https://w3id.org/cwl/cwl#WorkReuse": "WorkReuse", - "https://w3id.org/cwl/cwl#Workflow": "Workflow", - "https://w3id.org/cwl/cwl#WorkflowInputParameter": "WorkflowInputParameter", - "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", - "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", - "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", - "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", - "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null": "all_non_null", - "https://w3id.org/cwl/salad#array": "array", - "http://www.w3.org/2001/XMLSchema#boolean": "boolean", - "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", - "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", - "http://www.w3.org/2001/XMLSchema#double": "double", - "https://w3id.org/cwl/cwl#draft-2": "draft-2", - "https://w3id.org/cwl/cwl#draft-3": "draft-3", - "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", - "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", - "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", - "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", - "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", - "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", - "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", - "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", - "https://w3id.org/cwl/salad#enum": "enum", - "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null": "first_non_null", - "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", - "http://www.w3.org/2001/XMLSchema#float": "float", - "http://www.w3.org/2001/XMLSchema#int": "int", - "http://www.w3.org/2001/XMLSchema#long": "long", - "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": 
"merge_flattened", - "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", - "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", - "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing": "no_listing", - "https://w3id.org/cwl/salad#null": "null", - "https://w3id.org/cwl/salad#record": "record", - "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing": "shallow_listing", - "https://w3id.org/cwl/cwl#stderr": "stderr", - "https://w3id.org/cwl/cwl#stdin": "stdin", - "https://w3id.org/cwl/cwl#stdout": "stdout", - "http://www.w3.org/2001/XMLSchema#string": "string", - "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null": "the_only_non_null", - "https://w3id.org/cwl/cwl#v1.0": "v1.0", - "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", - "https://w3id.org/cwl/cwl#v1.1": "v1.1", - "https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", - "https://w3id.org/cwl/cwl#v1.2": "v1.2", - "https://w3id.org/cwl/cwl#v1.2.0-dev1": "v1.2.0-dev1", - "https://w3id.org/cwl/cwl#v1.2.0-dev2": "v1.2.0-dev2", - "https://w3id.org/cwl/cwl#v1.2.0-dev3": "v1.2.0-dev3", - "https://w3id.org/cwl/cwl#v1.2.0-dev4": "v1.2.0-dev4", - "https://w3id.org/cwl/cwl#v1.2.0-dev5": "v1.2.0-dev5", -} - -strtype = _PrimitiveLoader(str) -inttype = _PrimitiveLoader(int) -floattype = _PrimitiveLoader(float) -booltype = _PrimitiveLoader(bool) -None_type = _PrimitiveLoader(type(None)) -Any_type = _AnyLoader() -PrimitiveTypeLoader = _EnumLoader( - ( - "null", - "boolean", - "int", - "long", - "float", - "double", - "string", - ), - "PrimitiveType", -) -""" -Names of salad data types (based on Avro schema declarations). - -Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for -detailed information. 
- -null: no value -boolean: a binary value -int: 32-bit signed integer -long: 64-bit signed integer -float: single precision (32-bit) IEEE 754 floating-point number -double: double precision (64-bit) IEEE 754 floating-point number -string: Unicode character sequence -""" -AnyLoader = _EnumLoader(("Any",), "Any") -""" -The **Any** type validates for any non-null value. -""" -RecordFieldLoader = _RecordLoader(RecordField) -RecordSchemaLoader = _RecordLoader(RecordSchema) -EnumSchemaLoader = _RecordLoader(EnumSchema) -ArraySchemaLoader = _RecordLoader(ArraySchema) -CWLVersionLoader = _EnumLoader( - ( - "draft-2", - "draft-3.dev1", - "draft-3.dev2", - "draft-3.dev3", - "draft-3.dev4", - "draft-3.dev5", - "draft-3", - "draft-4.dev1", - "draft-4.dev2", - "draft-4.dev3", - "v1.0.dev4", - "v1.0", - "v1.1.0-dev1", - "v1.1", - "v1.2.0-dev1", - "v1.2.0-dev2", - "v1.2.0-dev3", - "v1.2.0-dev4", - "v1.2.0-dev5", - "v1.2", - ), - "CWLVersion", -) -""" -Version symbols for published CWL document versions. -""" -CWLTypeLoader = _EnumLoader( - ( - "null", - "boolean", - "int", - "long", - "float", - "double", - "string", - "File", - "Directory", - ), - "CWLType", -) -""" -Extends primitive types with the concept of a file and directory as a builtin type. -File: A File object -Directory: A Directory object -""" -FileLoader = _RecordLoader(File) -DirectoryLoader = _RecordLoader(Directory) -LoadListingEnumLoader = _EnumLoader( - ( - "no_listing", - "shallow_listing", - "deep_listing", - ), - "LoadListingEnum", -) -""" -Specify the desired behavior for loading the `listing` field of -a Directory object for use by expressions. - -no_listing: Do not load the directory listing. -shallow_listing: Only load the top level listing, do not recurse into subdirectories. -deep_listing: Load the directory listing and recursively load all subdirectories as well. 
-""" -ExpressionLoader = _ExpressionLoader(str) -InputBindingLoader = _RecordLoader(InputBinding) -InputRecordFieldLoader = _RecordLoader(InputRecordField) -InputRecordSchemaLoader = _RecordLoader(InputRecordSchema) -InputEnumSchemaLoader = _RecordLoader(InputEnumSchema) -InputArraySchemaLoader = _RecordLoader(InputArraySchema) -OutputRecordFieldLoader = _RecordLoader(OutputRecordField) -OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema) -OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema) -OutputArraySchemaLoader = _RecordLoader(OutputArraySchema) -InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement) -SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement) -SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema) -LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement) -EnvironmentDefLoader = _RecordLoader(EnvironmentDef) -CommandLineBindingLoader = _RecordLoader(CommandLineBinding) -CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding) -CommandLineBindableLoader = _RecordLoader(CommandLineBindable) -CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField) -CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema) -CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema) -CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema) -CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField) -CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema) -CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema) -CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema) -CommandInputParameterLoader = _RecordLoader(CommandInputParameter) -CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) -stdinLoader = _EnumLoader(("stdin",), "stdin") -""" -Only valid as a `type` for a `CommandLineTool` input with no -`inputBinding` set. 
`stdin` must not be specified at the `CommandLineTool` -level. - -The following -``` -inputs: - an_input_name: - type: stdin -``` -is equivalent to -``` -inputs: - an_input_name: - type: File - streamable: true - -stdin: $(inputs.an_input_name.path) -``` -""" -stdoutLoader = _EnumLoader(("stdout",), "stdout") -""" -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. - -The following -``` -outputs: - an_output_name: - type: stdout - -stdout: a_stdout_file -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stdout_file - -stdout: a_stdout_file -``` - -If there is no `stdout` name provided, a random filename will be created. -For example, the following -``` -outputs: - an_output_name: - type: stdout -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stdout_filenameABCDEFG - -stdout: random_stdout_filenameABCDEFG -``` - -If the `CommandLineTool` contains logically chained commands -(e.g. `echo a && echo b`) `stdout` must include the output of -every command. -""" -stderrLoader = _EnumLoader(("stderr",), "stderr") -""" -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. - -The following -``` -outputs: - an_output_name: - type: stderr - -stderr: a_stderr_file -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stderr_file - -stderr: a_stderr_file -``` - -If there is no `stderr` name provided, a random filename will be created. 
-For example, the following -``` -outputs: - an_output_name: - type: stderr -``` -is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stderr_filenameABCDEFG - -stderr: random_stderr_filenameABCDEFG -``` -""" -CommandLineToolLoader = _RecordLoader(CommandLineTool) -DockerRequirementLoader = _RecordLoader(DockerRequirement) -SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) -SoftwarePackageLoader = _RecordLoader(SoftwarePackage) -DirentLoader = _RecordLoader(Dirent) -InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement) -EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement) -ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement) -ResourceRequirementLoader = _RecordLoader(ResourceRequirement) -WorkReuseLoader = _RecordLoader(WorkReuse) -NetworkAccessLoader = _RecordLoader(NetworkAccess) -InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement) -ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit) -ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter) -WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter) -ExpressionToolLoader = _RecordLoader(ExpressionTool) -LinkMergeMethodLoader = _EnumLoader( - ( - "merge_nested", - "merge_flattened", - ), - "LinkMergeMethod", -) -""" -The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). -""" -PickValueMethodLoader = _EnumLoader( - ( - "first_non_null", - "the_only_non_null", - "all_non_null", - ), - "PickValueMethod", -) -""" -Picking non-null values among inbound data links, described in [WorkflowStepInput](#WorkflowStepInput). 
-""" -WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) -WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) -WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) -ScatterMethodLoader = _EnumLoader( - ( - "dotproduct", - "nested_crossproduct", - "flat_crossproduct", - ), - "ScatterMethod", -) -""" -The scatter method, as described in [workflow step scatter](#WorkflowStep). -""" -WorkflowStepLoader = _RecordLoader(WorkflowStep) -WorkflowLoader = _RecordLoader(Workflow) -SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) -ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement) -MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement) -StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement) -OperationInputParameterLoader = _RecordLoader(OperationInputParameter) -OperationOutputParameterLoader = _RecordLoader(OperationOutputParameter) -OperationLoader = _RecordLoader(Operation) -array_of_strtype = _ArrayLoader(strtype) -union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ) -) -uri_strtype_True_False_None = _URILoader(strtype, True, False, None) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( - ( - PrimitiveTypeLoader, - RecordSchemaLoader, - EnumSchemaLoader, - ArraySchemaLoader, - strtype, - ) -) -array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype -) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( - ( - PrimitiveTypeLoader, - 
RecordSchemaLoader, - EnumSchemaLoader, - ArraySchemaLoader, - strtype, - array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, - 2, -) -array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) -union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_RecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" -) -Record_nameLoader = _EnumLoader(("record",), "Record_name") -typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2) -union_of_None_type_or_strtype = _UnionLoader( - ( - None_type, - strtype, - ) -) -uri_union_of_None_type_or_strtype_True_False_None = _URILoader( - union_of_None_type_or_strtype, True, False, None -) -uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) -Enum_nameLoader = _EnumLoader(("enum",), "Enum_name") -typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2) -uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader( - 
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, - False, - True, - 2, -) -Array_nameLoader = _EnumLoader(("array",), "Array_name") -typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2) -File_classLoader = _EnumLoader(("File",), "File_class") -uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None) -uri_union_of_None_type_or_strtype_False_False_None = _URILoader( - union_of_None_type_or_strtype, False, False, None -) -union_of_None_type_or_inttype = _UnionLoader( - ( - None_type, - inttype, - ) -) -union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - FileLoader, - DirectoryLoader, - ) -) -array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( - union_of_FileLoader_or_DirectoryLoader -) -union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - None_type, - array_of_union_of_FileLoader_or_DirectoryLoader, - ) -) -secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader -) -Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None = _URILoader( - Directory_classLoader, False, True, None -) -union_of_None_type_or_booltype = _UnionLoader( - ( - None_type, - booltype, - ) -) -union_of_None_type_or_LoadListingEnumLoader = _UnionLoader( - ( - None_type, - LoadListingEnumLoader, - ) -) -array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) -union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( - ( - None_type, - SecondaryFileSchemaLoader, - array_of_SecondaryFileSchemaLoader, - ) -) 
-secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( - union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader -) -union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ExpressionLoader, - ) -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, - True, - False, - None, -) -union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - ) -) -uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = _URILoader( - union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None -) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - ) -) 
-typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - 2, -) -array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) -union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_InputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" -) -uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( - union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, - False, - True, - 2, -) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - OutputRecordSchemaLoader, - OutputEnumSchemaLoader, - OutputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype -) 
-union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - OutputRecordSchemaLoader, - OutputEnumSchemaLoader, - OutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - 2, -) -array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) -union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_OutputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" -) -uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( - union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, - False, - True, - 2, -) 
-union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( - ( - None_type, - FileLoader, - DirectoryLoader, - Any_type, - ) -) -union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _UnionLoader( - ( - CommandInputParameterLoader, - WorkflowInputParameterLoader, - OperationInputParameterLoader, - ) -) -array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _ArrayLoader( - union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader -) -idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _IdMapLoader( - array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader, - "id", - "type", -) -union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _UnionLoader( - ( - CommandOutputParameterLoader, - ExpressionToolOutputParameterLoader, - WorkflowOutputParameterLoader, - OperationOutputParameterLoader, - ) -) -array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _ArrayLoader( - union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader -) -idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _IdMapLoader( - array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader, - "id", - "type", -) 
-union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( - ( - InlineJavascriptRequirementLoader, - SchemaDefRequirementLoader, - LoadListingRequirementLoader, - DockerRequirementLoader, - SoftwareRequirementLoader, - InitialWorkDirRequirementLoader, - EnvVarRequirementLoader, - ShellCommandRequirementLoader, - ResourceRequirementLoader, - WorkReuseLoader, - NetworkAccessLoader, - InplaceUpdateRequirementLoader, - ToolTimeLimitLoader, - SubworkflowFeatureRequirementLoader, - ScatterFeatureRequirementLoader, - MultipleInputFeatureRequirementLoader, - StepInputExpressionRequirementLoader, - ) -) -array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader( - 
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader -) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( - ( - None_type, - array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - ) -) 
-idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader( - union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, - "class", - "None", -) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( - ( - InlineJavascriptRequirementLoader, - SchemaDefRequirementLoader, - LoadListingRequirementLoader, - DockerRequirementLoader, - SoftwareRequirementLoader, - InitialWorkDirRequirementLoader, - EnvVarRequirementLoader, - 
ShellCommandRequirementLoader, - ResourceRequirementLoader, - WorkReuseLoader, - NetworkAccessLoader, - InplaceUpdateRequirementLoader, - ToolTimeLimitLoader, - SubworkflowFeatureRequirementLoader, - ScatterFeatureRequirementLoader, - MultipleInputFeatureRequirementLoader, - StepInputExpressionRequirementLoader, - Any_type, - ) -) -array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type -) 
-union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( - ( - None_type, - array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - ) -) -idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( - 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, - "class", - "None", -) -union_of_None_type_or_CWLVersionLoader = _UnionLoader( - ( - None_type, - CWLVersionLoader, - ) -) -uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader( - union_of_None_type_or_CWLVersionLoader, False, True, None -) -union_of_None_type_or_array_of_strtype = _UnionLoader( - ( - None_type, - array_of_strtype, - ) -) -uri_union_of_None_type_or_array_of_strtype_True_False_None = _URILoader( - union_of_None_type_or_array_of_strtype, True, False, None -) -InlineJavascriptRequirement_classLoader = _EnumLoader( - ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" -) -uri_InlineJavascriptRequirement_classLoader_False_True_None = _URILoader( - InlineJavascriptRequirement_classLoader, False, True, None -) -SchemaDefRequirement_classLoader = _EnumLoader( - ("SchemaDefRequirement",), "SchemaDefRequirement_class" -) -uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( - SchemaDefRequirement_classLoader, False, True, None -) -union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( - ( - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - ) -) -array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( - 
union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader -) -union_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - strtype, - ExpressionLoader, - ) -) -union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - booltype, - ExpressionLoader, - ) -) -LoadListingRequirement_classLoader = _EnumLoader( - ("LoadListingRequirement",), "LoadListingRequirement_class" -) -uri_LoadListingRequirement_classLoader_False_True_None = _URILoader( - LoadListingRequirement_classLoader, False, True, None -) -union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - inttype, - ExpressionLoader, - ) -) -union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - array_of_strtype, - ) -) -union_of_None_type_or_ExpressionLoader = _UnionLoader( - ( - None_type, - ExpressionLoader, - ) -) -union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - CommandLineBindingLoader, - ) -) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - 
CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - 2, -) -array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) -union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_CommandInputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" - ) -) -uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( - union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - False, - True, - 2, -) 
-union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - ) -) -array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( - union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype -) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - 2, -) -union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( - ( - None_type, - CommandOutputBindingLoader, - ) -) -array_of_CommandOutputRecordFieldLoader = 
_ArrayLoader(CommandOutputRecordFieldLoader) -union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( - ( - None_type, - array_of_CommandOutputRecordFieldLoader, - ) -) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( - _IdMapLoader( - union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" - ) -) -uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( - union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - False, - True, - 2, -) -union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - stdinLoader, - CommandInputRecordSchemaLoader, - CommandInputEnumSchemaLoader, - CommandInputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - 
union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, - 2, -) -union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( - ( - CWLTypeLoader, - stdoutLoader, - stderrLoader, - CommandOutputRecordSchemaLoader, - CommandOutputEnumSchemaLoader, - CommandOutputArraySchemaLoader, - strtype, - array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - ) -) -typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( - union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, - 2, -) -CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") -uri_CommandLineTool_classLoader_False_True_None = _URILoader( - CommandLineTool_classLoader, False, True, None -) -array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) -idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( - 
array_of_CommandInputParameterLoader, "id", "type" -) -array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) -idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( - array_of_CommandOutputParameterLoader, "id", "type" -) -union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - strtype, - ExpressionLoader, - CommandLineBindingLoader, - ) -) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( - _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) -) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( - ( - None_type, - array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - ) -) -array_of_inttype = _ArrayLoader(inttype) -union_of_None_type_or_array_of_inttype = _UnionLoader( - ( - None_type, - array_of_inttype, - ) -) -DockerRequirement_classLoader = _EnumLoader( - ("DockerRequirement",), "DockerRequirement_class" -) -uri_DockerRequirement_classLoader_False_True_None = _URILoader( - DockerRequirement_classLoader, False, True, None -) -SoftwareRequirement_classLoader = _EnumLoader( - ("SoftwareRequirement",), "SoftwareRequirement_class" -) -uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( - SoftwareRequirement_classLoader, False, True, None -) -array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) -idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( - array_of_SoftwarePackageLoader, "package", "specs" -) -uri_union_of_None_type_or_array_of_strtype_False_False_None = _URILoader( - union_of_None_type_or_array_of_strtype, False, False, None -) -InitialWorkDirRequirement_classLoader = _EnumLoader( - ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" -) -uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( - InitialWorkDirRequirement_classLoader, False, True, None -) 
-union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - None_type, - DirentLoader, - ExpressionLoader, - FileLoader, - DirectoryLoader, - array_of_union_of_FileLoader_or_DirectoryLoader, - ) -) -array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( - union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader -) -union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - ExpressionLoader, - array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, - ) -) -EnvVarRequirement_classLoader = _EnumLoader( - ("EnvVarRequirement",), "EnvVarRequirement_class" -) -uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( - EnvVarRequirement_classLoader, False, True, None -) -array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) -idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( - array_of_EnvironmentDefLoader, "envName", "envValue" -) -ShellCommandRequirement_classLoader = _EnumLoader( - ("ShellCommandRequirement",), "ShellCommandRequirement_class" -) -uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( - ShellCommandRequirement_classLoader, False, True, None -) -ResourceRequirement_classLoader = _EnumLoader( - ("ResourceRequirement",), "ResourceRequirement_class" -) -uri_ResourceRequirement_classLoader_False_True_None = _URILoader( - ResourceRequirement_classLoader, False, True, None -) -union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - inttype, - floattype, - 
ExpressionLoader, - ) -) -WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") -uri_WorkReuse_classLoader_False_True_None = _URILoader( - WorkReuse_classLoader, False, True, None -) -union_of_booltype_or_ExpressionLoader = _UnionLoader( - ( - booltype, - ExpressionLoader, - ) -) -NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") -uri_NetworkAccess_classLoader_False_True_None = _URILoader( - NetworkAccess_classLoader, False, True, None -) -InplaceUpdateRequirement_classLoader = _EnumLoader( - ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" -) -uri_InplaceUpdateRequirement_classLoader_False_True_None = _URILoader( - InplaceUpdateRequirement_classLoader, False, True, None -) -ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") -uri_ToolTimeLimit_classLoader_False_True_None = _URILoader( - ToolTimeLimit_classLoader, False, True, None -) -union_of_inttype_or_ExpressionLoader = _UnionLoader( - ( - inttype, - ExpressionLoader, - ) -) -union_of_None_type_or_InputBindingLoader = _UnionLoader( - ( - None_type, - InputBindingLoader, - ) -) -ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") -uri_ExpressionTool_classLoader_False_True_None = _URILoader( - ExpressionTool_classLoader, False, True, None -) -array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) -idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( - array_of_WorkflowInputParameterLoader, "id", "type" -) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( - ExpressionToolOutputParameterLoader -) -idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( - array_of_ExpressionToolOutputParameterLoader, "id", "type" -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1 -) -union_of_None_type_or_LinkMergeMethodLoader = 
_UnionLoader( - ( - None_type, - LinkMergeMethodLoader, - ) -) -union_of_None_type_or_PickValueMethodLoader = _UnionLoader( - ( - None_type, - PickValueMethodLoader, - ) -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2 -) -array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) -idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( - array_of_WorkflowStepInputLoader, "id", "source" -) -union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( - ( - strtype, - WorkflowStepOutputLoader, - ) -) -array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( - union_of_strtype_or_WorkflowStepOutputLoader -) -union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( - (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) -) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( - _URILoader( - union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, - True, - False, - None, - ) -) -array_of_Any_type = _ArrayLoader(Any_type) -union_of_None_type_or_array_of_Any_type = _UnionLoader( - ( - None_type, - array_of_Any_type, - ) -) -idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( - union_of_None_type_or_array_of_Any_type, "class", "None" -) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( - ( - strtype, - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - OperationLoader, - ) -) -uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None = _URILoader( - union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, - False, - False, - None, -) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader( - 
union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0 -) -union_of_None_type_or_ScatterMethodLoader = _UnionLoader( - ( - None_type, - ScatterMethodLoader, - ) -) -uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader( - union_of_None_type_or_ScatterMethodLoader, False, True, None -) -Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None = _URILoader( - Workflow_classLoader, False, True, None -) -array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) -idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( - array_of_WorkflowOutputParameterLoader, "id", "type" -) -array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) -union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) -idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( - union_of_array_of_WorkflowStepLoader, "id", "None" -) -SubworkflowFeatureRequirement_classLoader = _EnumLoader( - ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" -) -uri_SubworkflowFeatureRequirement_classLoader_False_True_None = _URILoader( - SubworkflowFeatureRequirement_classLoader, False, True, None -) -ScatterFeatureRequirement_classLoader = _EnumLoader( - ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" -) -uri_ScatterFeatureRequirement_classLoader_False_True_None = _URILoader( - ScatterFeatureRequirement_classLoader, False, True, None -) -MultipleInputFeatureRequirement_classLoader = _EnumLoader( - ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" -) -uri_MultipleInputFeatureRequirement_classLoader_False_True_None = _URILoader( - MultipleInputFeatureRequirement_classLoader, False, True, None -) -StepInputExpressionRequirement_classLoader = _EnumLoader( - ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" -) 
-uri_StepInputExpressionRequirement_classLoader_False_True_None = _URILoader( - StepInputExpressionRequirement_classLoader, False, True, None -) -Operation_classLoader = _EnumLoader(("Operation",), "Operation_class") -uri_Operation_classLoader_False_True_None = _URILoader( - Operation_classLoader, False, True, None -) -array_of_OperationInputParameterLoader = _ArrayLoader(OperationInputParameterLoader) -idmap_inputs_array_of_OperationInputParameterLoader = _IdMapLoader( - array_of_OperationInputParameterLoader, "id", "type" -) -array_of_OperationOutputParameterLoader = _ArrayLoader(OperationOutputParameterLoader) -idmap_outputs_array_of_OperationOutputParameterLoader = _IdMapLoader( - array_of_OperationOutputParameterLoader, "id", "type" -) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( - ( - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - OperationLoader, - ) -) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader -) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( - ( - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - OperationLoader, - array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, - ) -) - - -def load_document( - doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - if baseuri is None: - baseuri = file_uri(os.getcwd()) + "/" - if loadingOptions is None: - loadingOptions = LoadingOptions() - result, metadata = _document_load( - 
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, - doc, - baseuri, - loadingOptions, - ) - return result - - -def load_document_with_metadata( - doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, - addl_metadata_fields: Optional[MutableSequence[str]] = None, -) -> Any: - if baseuri is None: - baseuri = file_uri(os.getcwd()) + "/" - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=baseuri) - return _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, - doc, - baseuri, - loadingOptions, - addl_metadata_fields=addl_metadata_fields, - ) - - -def load_document_by_string( - string: Any, - uri: str, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - yaml = yaml_no_ts() - result = yaml.load(string) - add_lc_filename(result, uri) - - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=uri) - - result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, - result, - uri, - loadingOptions, - ) - return result - - -def load_document_by_yaml( - yaml: Any, - uri: str, - loadingOptions: Optional[LoadingOptions] = None, -) -> Any: - """ - Shortcut to load via a YAML object. 
- yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True - """ - add_lc_filename(yaml, uri) - - if loadingOptions is None: - loadingOptions = LoadingOptions(fileuri=uri) - - result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, - yaml, - uri, - loadingOptions, - ) - return result