diff --git a/.github/workflows/ci_tests.yml b/.github/workflows/ci_tests.yml
index 8dfe33758..173c3fd22 100644
--- a/.github/workflows/ci_tests.yml
+++ b/.github/workflows/ci_tests.yml
@@ -13,23 +13,6 @@ jobs:
         run: |
           sudo apt-get update --fix-missing

-      - name: Workaround - install CMake 3.25.2 since 3.26.0 doesn't work
-        run: |
-          sudo apt remove cmake
-          sudo apt purge --auto-remove cmake
-          wget http://www.cmake.org/files/v3.25/cmake-3.25.2.tar.gz
-          tar xf cmake-3.25.2.tar.gz
-          cd cmake-3.25.2
-          ./configure
-          make
-          sudo make install
-          hash -r
-          cd -
-
-      - name: Show CMake version
-        run: |
-          cmake --version
-
       - name: Install dependencies
         run: |
           sudo apt-get install \
@@ -45,16 +28,26 @@

       - name: Install Python dependencies
         run: |
-          pip install --upgrade pip -r requirements.txt
-          pip install -r requirements_full.txt
-          pip install -r requirements_dev.txt
-          pip install -r requirements_doc.txt
+          python3 -m pip install --upgrade pip -r requirements.txt
+          python3 -m pip install -r requirements_full.txt
+          python3 -m pip install -r requirements_dev.txt
+          python3 -m pip install -r requirements_doc.txt
+
       - name: check numpy
         run: |
           which python3
           python3 --version
-          pip3 --version
+          python3 -m pip --version
           python3 -c 'import numpy as np; print(np.get_include())'

+      - name: Workaround - install CMake 3.25.2 since 3.26.0 doesn't work
+        # Normally we would install cmake with the package manager, but
+        # Ubuntu 20.04 doesn't seem to keep older versions of cmake around...
+        # Fortunately, there exists a pip package
+        run: |
+          python3 -m pip install cmake==3.25.2
+          cmake --version
+
       - name: configure
         run: |
           mkdir build
@@ -63,27 +56,32 @@
           CFLAGS='-Wno-missing-field-initializers' \
           cmake .. -DFORCE_EXAMPLES=ON -DWITH_FORTRAN=YES
           cd -
+
       - name: make
         run: |
           cd build
-          make
+          cmake --build .
           cd -
+
       - name: install
         run: |
           cd build
-          make install
+          cmake --install .
          cd -
+
      - name: make test
        run: |
          cd build
          ctest || ctest --rerun-failed --output-on-failure -V
          cd -
+
      - name: build Linux wheel
        run: |
          cd python
-          pip install numpy wheel
+          python3 -m pip install numpy wheel
          python3 setup.py bdist_wheel
          cd -
+
      - name: Install python package via setup.py and test the installation
        run: |
          cd python
diff --git a/bindings/python/datamodel.py b/bindings/python/datamodel.py
index eb9ef91e4..d446aecf9 100644
--- a/bindings/python/datamodel.py
+++ b/bindings/python/datamodel.py
@@ -9,9 +9,11 @@ class DataModelError(dlite.DLiteError):
     """Raised if the datamodel is inconsistent."""

+
 class MissingDimensionError(DataModelError):
     """Raised if a dimension referred to in a property is not defined."""

+
 class UnusedDimensionError(DataModelError):
     """Raised if a dimension is not referred to in any property."""

@@ -38,7 +40,7 @@ def __init__(self, uri, schema=None, description=None):
     schema = property(
         lambda self: self._schema,
         lambda self, schema: self._set_schema(schema),
-        doc='Meta-metadata for the datamodel.',
+        doc="Meta-metadata for the datamodel.",
     )

     def _set_schema(self, schema):
@@ -49,7 +51,7 @@ def __init__(self, uri, schema=None, description=None):
         elif isinstance(schema, str):
             self._schema = dlite.get_instance(schema)
         else:
-            TypeError('`schema` must be a string or a DLite metadata schema.')
+            raise TypeError(
+                "`schema` must be a string or a DLite metadata schema."
+            )

     def add_dimension(self, name, description):
         """Add dimension with given `name` and description to data model."""
@@ -57,9 +59,9 @@
             raise KeyError(f'A dimension named "{name}" already exists')
         self.dimensions[name] = dlite.Dimension(name, description)

-
-    def add_property(self, name, type, shape=None, unit=None, description=None,
-                     dims=None):
+    def add_property(
+        self, name, type, shape=None, unit=None, description=None, dims=None
+    ):
         """Add property to data model.

         Parameters:
@@ -91,14 +93,18 @@
         )

     def _get_dims_variables(self):
-        """Returns a set of all dimension names referred to in property dims."""
+        """Returns a set of all dimension names referred to in property shapes.
+        """
         names = set()
         for prop in self.properties.values():
             if prop.shape is not None:
                 for dim in prop.shape:
                     tree = ast.parse(dim)
-                    names.update(node.id for node in ast.walk(tree)
-                                 if isinstance(node, ast.Name))
+                    names.update(
+                        node.id
+                        for node in ast.walk(tree)
+                        if isinstance(node, ast.Name)
+                    )
         return names

     def get_missing_dimensions(self):
@@ -108,23 +114,24 @@
         return self._get_dims_variables().difference(self.dimensions)

     def get_unused_dimensions(self):
-        """Returns a set of dimensions not referred to in any property shapes."""
+        """Returns a set of dimensions not referred to in any property shapes.
+        """
         return set(self.dimensions).difference(self._get_dims_variables())

     def validate(self):
         """Raises an exception if there are missing or unused dimensions."""
         missing = self.get_missing_dimensions()
         if missing:
-            raise MissingDimensionError(f'Missing dimensions: {missing}')
+            raise MissingDimensionError(f"Missing dimensions: {missing}")
         unused = self.get_unused_dimensions()
         if unused:
-            raise UnusedDimensionError(f'Unused dimensions: {unused}')
+            raise UnusedDimensionError(f"Unused dimensions: {unused}")

     def get(self):
         """Returns a DLite Metadata created from the datamodel."""
         self.validate()
         dims = [len(self.dimensions), len(self.properties)]
-        if 'nrelations' in self.schema:
+        if "nrelations" in self.schema:
             dims.append(len(self.relations))

         # Hmm, there seems to be a bug when instantiating from schema.
@@ -134,15 +141,16 @@
         # For now, let's assume that it is EntitySchema.
         if self.schema.uri != dlite.ENTITY_SCHEMA:
             raise NotImplementedError(
-                f'Currently only entity schema is supported')
+                "Currently only entity schema is supported"
+            )

-        #meta = self.schema(dims, id=self.uri)
-        #meta.description = self.description
-        #meta['dimensions'] = list(self.dimensions.values())
-        #meta['properties'] = list(self.properties.values())
-        #if 'relations' in meta:
+        # meta = self.schema(dims, id=self.uri)
+        # meta.description = self.description
+        # meta['dimensions'] = list(self.dimensions.values())
+        # meta['properties'] = list(self.properties.values())
+        # if 'relations' in meta:
         #     meta['relations'] = self.relations
-        #return meta
+        # return meta

         return dlite.Instance.create_metadata(
             uri=self.uri,
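A minimal usage sketch of the DataModel API touched by the hunks above (the
import path dlite.datamodel, the URI and the dimension/property names are
illustrative assumptions, not taken from the diff):

    from dlite.datamodel import DataModel

    dm = DataModel(
        "http://example.com/meta/0.1/Particle",
        description="A small example entity.",
    )
    dm.add_dimension("n", "Number of measurements.")
    dm.add_property(
        "energy", "float64", shape=["n"], unit="eV",
        description="Measured energies.",
    )
    dm.validate()    # raises MissingDimensionError/UnusedDimensionError on error
    meta = dm.get()  # returns the corresponding DLite metadata
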
diff --git a/bindings/python/dlite-storage-python.i b/bindings/python/dlite-storage-python.i
index 1f6ce4c91..13dd1633c 100644
--- a/bindings/python/dlite-storage-python.i
+++ b/bindings/python/dlite-storage-python.i
@@ -8,7 +8,7 @@
     # Override default __init__()
    def __init__(self, driver_or_url, location=None, options=None):
        loc = str(location) if location else None
-        _dlite.Instance_swiginit(self, _dlite.new_Storage(
+        _dlite.Storage_swiginit(self, _dlite.new_Storage(
            driver_or_url=driver_or_url, location=loc, options=options))

    def __enter__(self):
@@ -41,10 +41,25 @@
        )

    @classmethod
-    def create_from_url(cls, url):
+    def from_url(cls, url):
        """Create a new storage from `url`."""
        return cls(url)

+    @classmethod
+    def load_plugins(cls):
+        """Load all storage plugins."""
+        _dlite._load_all_storage_plugins()
+
+    @classmethod
+    def unload_plugin(cls, name):
+        """Unload storage plugin with this name."""
+        _dlite._unload_storage_plugin(str(name))
+
+    @classmethod
+    def plugin_help(cls, name):
+        """Return documentation of storage plugin with this name."""
+        return _dlite._storage_plugin_help(name)
+
    def instances(self, pattern=None):
        """Returns an iterator over all instances in storage whose metadata
        URI matches `pattern`."""
@@ -64,6 +79,7 @@

    driver = property(get_driver,
                      doc='Name of driver associated with this storage')
+
  %}
}

@@ -77,12 +93,24 @@
  %}
}

-
%extend StoragePluginIter {
  %pythoncode %{
+    # Override default __init__()
+    def __init__(self):
+        """Iterates over loaded storage plugins."""
+        _dlite.StoragePluginIter_swiginit(
+            self, _dlite.new_StoragePluginIter())
+        # Keep a reference to self, such that it is not garbage-collected
+        # before end of iterations
+        if not hasattr(_dlite, "_storage_plugin_iters"):
+            _dlite._storage_plugin_iters = {}
+        _dlite._storage_plugin_iters[id(self.iter)] = self
+
    def __next__(self):
        name = self.next()
        if not name:
+            # Delete reference to iterator object stored away in __init__()
+            del _dlite._storage_plugin_iters[id(self.this)]
            raise StopIteration()
        return name
  %}
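A sketch of how the new plugin helpers are meant to be used from Python
(assuming the SWIG-generated Storage and StoragePluginIter classes are
re-exported by the top-level dlite package; the "json" driver name is just
an example):

    import dlite

    dlite.Storage.load_plugins()            # load all storage plugins
    for name in dlite.StoragePluginIter():  # iterate over loaded plugin names
        print(name)
    print(dlite.Storage.plugin_help("json"))
    dlite.Storage.unload_plugin("json")
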
diff --git a/bindings/python/dlite-storage.i b/bindings/python/dlite-storage.i
index 3a6c31c49..f3e78d056 100644
--- a/bindings/python/dlite-storage.i
+++ b/bindings/python/dlite-storage.i
@@ -2,8 +2,17 @@
 %{
 #include "dlite.h"
+#include "dlite-errors.h"
 #include "dlite-storage.h"
 #include "dlite-storage-plugins.h"
+
+char *_storage_plugin_help(const char *name) {
+  const DLiteStoragePlugin *api = dlite_storage_plugin_get(name);
+  if (api->help) return api->help(api);
+  return dlite_err(dliteUnsupportedError,
+                   "\"%s\" storage does not support help", name), NULL;
+}
+
 %}

@@ -68,8 +77,8 @@ enum _DLiteIDFlag {
 %feature("docstring", "\
 Represents a data storage.

-Parameters
-----------
+Arguments
+---------
 driver_or_url : string
     Name of driver used to connect to the storage or, if `location`
     is not given, the URL to the storage:
@@ -195,9 +204,9 @@ Iterates over loaded storage plugins.
     DLiteStoragePluginIter *iter;
   };
 %}
-%feature("docstring", "") new_StoragePluginIter;
 %extend StoragePluginIter {
   StoragePluginIter(void) {
+    //dlite_storage_plugin_load_all();
     DLiteStoragePluginIter *iter = dlite_storage_plugin_iter_create();
     return (struct StoragePluginIter *)iter;
   }
@@ -218,9 +227,14 @@ Iterates over loaded storage plugins.
 }

-%rename(storage_unload) dlite_storage_plugin_unload;
+%rename(_load_all_storage_plugins) dlite_storage_plugin_load_all;
+int dlite_storage_plugin_load_all();
+
+%rename(_unload_storage_plugin) dlite_storage_plugin_unload;
 int dlite_storage_plugin_unload(const char *name);

+char *_storage_plugin_help(const char *name);
+

 /* -----------------------------------
  * Target language-specific extensions
diff --git a/bindings/python/doxy2swig.py b/bindings/python/doxy2swig.py
index 847a3504f..fbc1cef21 100755
--- a/bindings/python/doxy2swig.py
+++ b/bindings/python/doxy2swig.py
@@ -58,44 +58,47 @@ def my_open_read(source):
         return source
     else:
         try:
-            return open(source, encoding='utf-8')
+            return open(source, encoding="utf-8")
         except TypeError:
             return open(source)

+
 def my_open_write(dest):
     if hasattr(dest, "write"):
         return dest
     else:
         try:
-            return open(dest, 'w', encoding='utf-8')
+            return open(dest, "w", encoding="utf-8")
         except TypeError:
-            return open(dest, 'w')
+            return open(dest, "w")
+

 # MARK: Text handling:
-def shift(txt, indent = '    ', prepend = ''):
+def shift(txt, indent="    ", prepend=""):
     """Return a list corresponding to the lines of text in the `txt` list
     indented by `indent`. Prepend instead the string given in `prepend` to
     the beginning of the first line. Note that if len(prepend) > len(indent), then
-    `prepend` will be truncated (doing better is tricky!).  This preserves a
+    `prepend` will be truncated (doing better is tricky!). This preserves a
     special '' entry at the end of `txt` (see `do_para` for the meaning).
""" if type(indent) is int: - indent = indent * ' ' - special_end = txt[-1:] == [''] - lines = ''.join(txt).splitlines(True) - for i in range(1,len(lines)): + indent = indent * " " + special_end = txt[-1:] == [""] + lines = "".join(txt).splitlines(True) + for i in range(1, len(lines)): if lines[i].strip() or indent.strip(): lines[i] = indent + lines[i] if not lines: return prepend - prepend = prepend[:len(indent)] - indent = indent[len(prepend):] + prepend = prepend[: len(indent)] + indent = indent[len(prepend) :] lines[0] = prepend + indent + lines[0] - ret = [''.join(lines)] + ret = ["".join(lines)] if special_end: - ret.append('') + ret.append("") return ret + class Doxy2SWIG: """Converts Doxygen generated XML files into a file containing docstrings that can be used by SWIG-1.3.x that have support for @@ -104,14 +107,17 @@ class Doxy2SWIG: """ - def __init__(self, src, - with_function_signature = False, - with_type_info = False, - with_constructor_list = False, - with_attribute_list = False, - with_overloaded_functions = False, - textwidth = 80, - quiet = False): + def __init__( + self, + src, + with_function_signature=False, + with_type_info=False, + with_constructor_list=False, + with_attribute_list=False, + with_overloaded_functions=False, + textwidth=80, + quiet=False, + ): """Initialize the instance given a source object. `src` can be a file or filename. If you do not want to include function definitions from doxygen then set @@ -131,7 +137,7 @@ def __init__(self, src, # state: self.indent = 0 - self.listitem = '' + self.listitem = "" self.pieces = [] f = my_open_read(src) @@ -139,21 +145,35 @@ def __init__(self, src, self.xmldoc = minidom.parse(f).documentElement f.close() - self.pieces.append('\n// File: %s\n' % - os.path.basename(f.name)) + self.pieces.append("\n// File: %s\n" % os.path.basename(f.name)) - self.space_re = re.compile(r'\s+') + self.space_re = re.compile(r"\s+") self.lead_spc = re.compile(r'^(%feature\S+\s+\S+\s*?)"\s+(\S)') self.multi = 0 - self.ignores = ['inheritancegraph', 'param', 'listofallmembers', - 'innerclass', 'name', 'declname', 'incdepgraph', - 'invincdepgraph', 'programlisting', 'type', - 'references', 'referencedby', 'location', - 'collaborationgraph', 'reimplements', - 'reimplementedby', 'derivedcompoundref', - 'basecompoundref', - 'argsstring', 'definition', 'exceptions'] - #self.generics = [] + self.ignores = [ + "inheritancegraph", + "param", + "listofallmembers", + "innerclass", + "name", + "declname", + "incdepgraph", + "invincdepgraph", + "programlisting", + "type", + "references", + "referencedby", + "location", + "collaborationgraph", + "reimplements", + "reimplementedby", + "derivedcompoundref", + "basecompoundref", + "argsstring", + "definition", + "exceptions", + ] + # self.generics = [] def generate(self): """Parses the file set in the initialization. The resulting @@ -164,8 +184,8 @@ def generate(self): def write(self, fname): o = my_open_write(fname) - o.write(''.join(self.pieces)) - o.write('\n') + o.write("".join(self.pieces)) + o.write("\n") o.close() def parse(self, node): @@ -182,14 +202,14 @@ def parse_Document(self, node): def parse_Text(self, node): txt = node.data - if txt == ' ': + if txt == " ": # this can happen when two tags follow in a text, e.g., # " ... $..." etc. # here we want to keep the space. 
             self.add_text(txt)
             return
-        txt = txt.replace('\\', r'\\')
-        txt = txt.replace('"', r'\"')
+        txt = txt.replace("\\", r"\\")
+        txt = txt.replace('"', r"\"")
         # ignore pure whitespace
         m = self.space_re.match(txt)
         if not (m and len(m.group()) == len(txt)):
@@ -216,10 +236,12 @@ def parse_Element(self, node):
             handlerMethod(node)
         else:
             self.subnode_parse(node)
-            #if name not in self.generics: self.generics.append(name)
+            # if name not in self.generics: self.generics.append(name)

-# MARK: Special format parsing
-    def subnode_parse(self, node, pieces=None, indent=0, ignore=[], restrict=None):
+    # MARK: Special format parsing
+    def subnode_parse(
+        self, node, pieces=None, indent=0, ignore=[], restrict=None
+    ):
         """Parse the subnodes of a given node. Subnodes with tags in the
         `ignore` list are ignored. If pieces is given, use this as target for
         the parse results instead of self.pieces. Indent all lines by the amount
@@ -230,14 +252,14 @@ def subnode_parse(self, node, pieces=None, indent=0, ignore=[], restrict=None):
         else:
             old_pieces = []
         if type(indent) is int:
-            indent = indent * ' '
+            indent = indent * " "
         if len(indent) > 0:
-            pieces = ''.join(self.pieces)
-            i_piece = pieces[:len(indent)]
-            if self.pieces[-1:] == ['']:
-                self.pieces = [pieces[len(indent):]] + ['']
+            pieces = "".join(self.pieces)
+            i_piece = pieces[: len(indent)]
+            if self.pieces[-1:] == [""]:
+                self.pieces = [pieces[len(indent) :]] + [""]
             elif self.pieces != []:
-                self.pieces = [pieces[len(indent):]]
+                self.pieces = [pieces[len(indent) :]]
         self.indent += len(indent)
         for n in node.childNodes:
             if restrict is not None:
@@ -258,8 +280,8 @@ def surround_parse(self, node, pre_char, post_char):
         self.add_text(pre_char)
         self.subnode_parse(node)
         self.add_text(post_char)
-
-# MARK: Helper functions
+
+    # MARK: Helper functions
     def get_specific_subnodes(self, node, name, recursive=0):
         """Given a node and a name, return a list of child `ELEMENT_NODEs`, that
         have a `tagName` matching the `name`. Search recursively for `recursive`
@@ -269,7 +291,7 @@
         ret = [x for x in children if x.tagName == name]
         if recursive > 0:
             for x in children:
-                ret.extend(self.get_specific_subnodes(x, name, recursive-1))
+                ret.extend(self.get_specific_subnodes(x, name, recursive - 1))
         return ret

     def get_specific_nodes(self, node, names):
@@ -278,9 +300,11 @@ def get_specific_nodes(self, node, names):
         `ELEMENT_NODEs`, that have a `tagName` equal to the name.

         """
-        nodes = [(x.tagName, x) for x in node.childNodes
-                 if x.nodeType == x.ELEMENT_NODE and
-                 x.tagName in names]
+        nodes = [
+            (x.tagName, x)
+            for x in node.childNodes
+            if x.nodeType == x.ELEMENT_NODE and x.tagName in names
+        ]
         return dict(nodes)

     def add_text(self, value):
@@ -294,28 +318,30 @@ def start_new_paragraph(self):
         """Make sure to create an empty line.  This is overridden, if the previous
         text ends with the special marker ''.  In that case, nothing is done.
""" - if self.pieces[-1:] == ['']: # respect special marker + if self.pieces[-1:] == [""]: # respect special marker return - elif self.pieces == []: # first paragraph, add '\n', override with '' - self.pieces = ['\n'] - elif self.pieces[-1][-1:] != '\n': # previous line not ended - self.pieces.extend([' \n' ,'\n']) - else: #default - self.pieces.append('\n') + elif self.pieces == []: # first paragraph, add '\n', override with '' + self.pieces = ["\n"] + elif self.pieces[-1][-1:] != "\n": # previous line not ended + self.pieces.extend([" \n", "\n"]) + else: # default + self.pieces.append("\n") def add_line_with_subsequent_indent(self, line, indent=4): """Add line of text and wrap such that subsequent lines are indented by `indent` spaces. """ if isinstance(line, (list, tuple)): - line = ''.join(line) + line = "".join(line) line = line.strip() - width = self.textwidth-self.indent-indent + width = self.textwidth - self.indent - indent wrapped_lines = textwrap.wrap(line[indent:], width=width) for i in range(len(wrapped_lines)): - if wrapped_lines[i] != '': - wrapped_lines[i] = indent * ' ' + wrapped_lines[i] - self.pieces.append(line[:indent] + '\n'.join(wrapped_lines)[indent:] + ' \n') + if wrapped_lines[i] != "": + wrapped_lines[i] = indent * " " + wrapped_lines[i] + self.pieces.append( + line[:indent] + "\n".join(wrapped_lines)[indent:] + " \n" + ) def extract_text(self, node): """Return the string representation of the node or list of nodes by parsing the @@ -324,100 +350,117 @@ def extract_text(self, node): """ if not isinstance(node, (list, tuple)): node = [node] - pieces, self.pieces = self.pieces, [''] + pieces, self.pieces = self.pieces, [""] for n in node: for sn in n.childNodes: self.parse(sn) - ret = ''.join(self.pieces) + ret = "".join(self.pieces) self.pieces = pieces return ret def get_function_signature(self, node): """Returns the function signature string for memberdef nodes.""" - name = self.extract_text(self.get_specific_subnodes(node, 'name')) + name = self.extract_text(self.get_specific_subnodes(node, "name")) if self.with_type_info: - argsstring = self.extract_text(self.get_specific_subnodes(node, 'argsstring')) + argsstring = self.extract_text( + self.get_specific_subnodes(node, "argsstring") + ) else: argsstring = [] param_id = 1 - for n_param in self.get_specific_subnodes(node, 'param'): - declname = self.extract_text(self.get_specific_subnodes(n_param, 'declname')) + for n_param in self.get_specific_subnodes(node, "param"): + declname = self.extract_text( + self.get_specific_subnodes(n_param, "declname") + ) if not declname: - declname = 'arg' + str(param_id) - defval = self.extract_text(self.get_specific_subnodes(n_param, 'defval')) + declname = "arg" + str(param_id) + defval = self.extract_text( + self.get_specific_subnodes(n_param, "defval") + ) if defval: - defval = '=' + defval + defval = "=" + defval argsstring.append(declname + defval) param_id = param_id + 1 - argsstring = '(' + ', '.join(argsstring) + ')' - type = self.extract_text(self.get_specific_subnodes(node, 'type')) + argsstring = "(" + ", ".join(argsstring) + ")" + type = self.extract_text(self.get_specific_subnodes(node, "type")) function_definition = name + argsstring - if type != '' and type != 'void': - function_definition = function_definition + ' -> ' + type - return '`' + function_definition + '` ' + if type != "" and type != "void": + function_definition = function_definition + " -> " + type + return "`" + function_definition + "` " -# MARK: Special parsing tasks (need to be called manually) + 
+    # MARK: Special parsing tasks (need to be called manually)
     def make_constructor_list(self, constructor_nodes, classname):
         """Produces the "Constructors" section and the constructor signatures
         (since swig does not do so for classes) for class docstrings."""
         if constructor_nodes == []:
             return
-        self.add_text(['\n', 'Constructors',
-                       '\n', '------------'])
+        self.add_text(["\n", "Constructors", "\n", "------------"])
         for n in constructor_nodes:
-            self.add_text('\n')
-            self.add_line_with_subsequent_indent('* ' + self.get_function_signature(n))
-            self.subnode_parse(n, pieces = [], indent=4, ignore=['definition', 'name'])
+            self.add_text("\n")
+            self.add_line_with_subsequent_indent(
+                "* " + self.get_function_signature(n)
+            )
+            self.subnode_parse(
+                n, pieces=[], indent=4, ignore=["definition", "name"]
+            )

     def make_attribute_list(self, node):
         """Produces the "Attributes" section in class docstrings for public
         member variables (attributes).
         """
         atr_nodes = []
-        for n in self.get_specific_subnodes(node, 'memberdef', recursive=2):
-            if n.attributes['kind'].value == 'variable' and n.attributes['prot'].value == 'public':
+        for n in self.get_specific_subnodes(node, "memberdef", recursive=2):
+            if (
+                n.attributes["kind"].value == "variable"
+                and n.attributes["prot"].value == "public"
+            ):
                 atr_nodes.append(n)
         if not atr_nodes:
             return
-        self.add_text(['\n', 'Attributes',
-                       '\n', '----------'])
+        self.add_text(["\n", "Attributes", "\n", "----------"])
         for n in atr_nodes:
-            name = self.extract_text(self.get_specific_subnodes(n, 'name'))
-            self.add_text(['\n* ', '`', name, '`', ' : '])
-            self.add_text(['`', self.extract_text(self.get_specific_subnodes(n, 'type')), '`'])
-            self.add_text('  \n')
-            restrict = ['briefdescription', 'detaileddescription']
-            self.subnode_parse(n, pieces=[''], indent=4, restrict=restrict)
+            name = self.extract_text(self.get_specific_subnodes(n, "name"))
+            self.add_text(["\n* ", "`", name, "`", " : "])
+            self.add_text(
+                [
+                    "`",
+                    self.extract_text(self.get_specific_subnodes(n, "type")),
+                    "`",
+                ]
+            )
+            self.add_text("  \n")
+            restrict = ["briefdescription", "detaileddescription"]
+            self.subnode_parse(n, pieces=[""], indent=4, restrict=restrict)

     def get_memberdef_nodes_and_signatures(self, node, kind):
         """Collects the memberdef nodes and corresponding signatures that
         correspond to public function entries that are at most depth 2 deeper
-        than the current (compounddef) node.  Returns a dictionary with
+        than the current (compounddef) node. Returns a dictionary with
         function signatures (what swig expects after the %feature directive)
         as keys, and a list of corresponding memberdef nodes as values."""
         sig_dict = {}
-        sig_prefix = ''
-        if kind in ('file', 'namespace'):
-            ns_node = node.getElementsByTagName('innernamespace')
-            if not ns_node and kind == 'namespace':
-                ns_node = node.getElementsByTagName('compoundname')
+        sig_prefix = ""
+        if kind in ("file", "namespace"):
+            ns_node = node.getElementsByTagName("innernamespace")
+            if not ns_node and kind == "namespace":
+                ns_node = node.getElementsByTagName("compoundname")
             if ns_node:
-                sig_prefix = self.extract_text(ns_node[0]) + '::'
-        elif kind in ('class', 'struct'):
+                sig_prefix = self.extract_text(ns_node[0]) + "::"
+        elif kind in ("class", "struct"):
             # Get the full function name.
-            cn_node = node.getElementsByTagName('compoundname')
-            sig_prefix = self.extract_text(cn_node[0]) + '::'
+            cn_node = node.getElementsByTagName("compoundname")
+            sig_prefix = self.extract_text(cn_node[0]) + "::"

-        md_nodes = self.get_specific_subnodes(node, 'memberdef', recursive=2)
+        md_nodes = self.get_specific_subnodes(node, "memberdef", recursive=2)
         for n in md_nodes:
-            if n.attributes['prot'].value != 'public':
+            if n.attributes["prot"].value != "public":
                 continue
-            if n.attributes['kind'].value in ['variable', 'typedef']:
+            if n.attributes["kind"].value in ["variable", "typedef"]:
                 continue
-            if not self.get_specific_subnodes(n, 'definition'):
+            if not self.get_specific_subnodes(n, "definition"):
                 continue
-            name = self.extract_text(self.get_specific_subnodes(n, 'name'))
-            if name[:8] == 'operator':
+            name = self.extract_text(self.get_specific_subnodes(n, "name"))
+            if name[:8] == "operator":
                 continue
             sig = sig_prefix + name
             if sig in sig_dict:
@@ -425,15 +468,21 @@
             else:
                 sig_dict[sig] = [n]
         return sig_dict
-
-    def handle_typical_memberdefs_no_overload(self, signature, memberdef_nodes):
+
+    def handle_typical_memberdefs_no_overload(
+        self, signature, memberdef_nodes
+    ):
         """Produce standard documentation for memberdef_nodes."""
         for n in memberdef_nodes:
-            self.add_text(['\n', '%feature("docstring") ', signature, ' "', '\n'])
+            self.add_text(
+                ["\n", '%feature("docstring") ', signature, ' "', "\n"]
+            )
             if self.with_function_signature:
-                self.add_line_with_subsequent_indent(self.get_function_signature(n))
-            self.subnode_parse(n, pieces=[], ignore=['definition', 'name'])
-            self.add_text(['";', '\n'])
+                self.add_line_with_subsequent_indent(
+                    self.get_function_signature(n)
+                )
+            self.subnode_parse(n, pieces=[], ignore=["definition", "name"])
+            self.add_text(['";', "\n"])

     def handle_typical_memberdefs(self, signature, memberdef_nodes):
         """Produces docstring entries containing an "Overloaded function"
@@ -442,76 +491,82 @@
         normal documentation.
""" if len(memberdef_nodes) == 1 or not self.with_overloaded_functions: - self.handle_typical_memberdefs_no_overload(signature, memberdef_nodes) + self.handle_typical_memberdefs_no_overload( + signature, memberdef_nodes + ) return - self.add_text(['\n', '%feature("docstring") ', signature, ' "', '\n']) + self.add_text(["\n", '%feature("docstring") ', signature, ' "', "\n"]) if self.with_function_signature: for n in memberdef_nodes: - self.add_line_with_subsequent_indent(self.get_function_signature(n)) - self.add_text('\n') - self.add_text(['Overloaded function', '\n', - '-------------------']) + self.add_line_with_subsequent_indent( + self.get_function_signature(n) + ) + self.add_text("\n") + self.add_text(["Overloaded function", "\n", "-------------------"]) for n in memberdef_nodes: - self.add_text('\n') - self.add_line_with_subsequent_indent('* ' + self.get_function_signature(n)) - self.subnode_parse(n, pieces=[], indent=4, ignore=['definition', 'name']) - self.add_text(['";', '\n']) - - -# MARK: Tag handlers + self.add_text("\n") + self.add_line_with_subsequent_indent( + "* " + self.get_function_signature(n) + ) + self.subnode_parse( + n, pieces=[], indent=4, ignore=["definition", "name"] + ) + self.add_text(['";', "\n"]) + + # MARK: Tag handlers def do_linebreak(self, node): - self.add_text(' ') - + self.add_text(" ") + def do_ndash(self, node): - self.add_text('--') + self.add_text("--") def do_mdash(self, node): - self.add_text('---') + self.add_text("---") def do_emphasis(self, node): - self.surround_parse(node, '*', '*') + self.surround_parse(node, "*", "*") def do_bold(self, node): - self.surround_parse(node, '**', '**') - + self.surround_parse(node, "**", "**") + def do_computeroutput(self, node): - self.surround_parse(node, '`', '`') + self.surround_parse(node, "`", "`") def do_heading(self, node): self.start_new_paragraph() - pieces, self.pieces = self.pieces, [''] - level = int(node.attributes['level'].value) + pieces, self.pieces = self.pieces, [""] + level = int(node.attributes["level"].value) self.subnode_parse(node) if level == 1: - self.pieces.insert(0, '\n') - self.add_text(['\n', len(''.join(self.pieces).strip()) * '=']) + self.pieces.insert(0, "\n") + self.add_text(["\n", len("".join(self.pieces).strip()) * "="]) elif level == 2: - self.add_text(['\n', len(''.join(self.pieces).strip()) * '-']) + self.add_text(["\n", len("".join(self.pieces).strip()) * "-"]) elif level >= 3: - self.pieces.insert(0, level * '#' + ' ') + self.pieces.insert(0, level * "#" + " ") # make following text have no gap to the heading: - pieces.extend([''.join(self.pieces) + ' \n', '']) + pieces.extend(["".join(self.pieces) + " \n", ""]) self.pieces = pieces - + def do_verbatim(self, node): self.start_new_paragraph() - self.subnode_parse(node, pieces=[''], indent=4) - + self.subnode_parse(node, pieces=[""], indent=4) + def do_blockquote(self, node): self.start_new_paragraph() - self.subnode_parse(node, pieces=[''], indent='> ') - + self.subnode_parse(node, pieces=[""], indent="> ") + def do_hruler(self, node): self.start_new_paragraph() - self.add_text('* * * * * \n') - + self.add_text("* * * * * \n") + def do_includes(self, node): - self.add_text('\nC++ includes: ') + self.add_text("\nC++ includes: ") self.subnode_parse(node) - self.add_text('\n') + self.add_text("\n") -# MARK: Para tag handler + # MARK: Para tag handler def do_para(self, node): """This is the only place where text wrapping is automatically performed. 
         Generally, this function parses the node (locally), wraps the text, and
@@ -524,52 +579,52 @@
         Paragraphs always end with '  \n', but if the parsed content ends with
         the special symbol '', this is passed on.
         """
-        if self.pieces[-1:] == ['']:
+        if self.pieces[-1:] == [""]:
             pieces, self.pieces = self.pieces[:-2], self.pieces[-2:-1]
         else:
-            self.add_text('\n')
-            pieces, self.pieces = self.pieces, ['']
+            self.add_text("\n")
+            pieces, self.pieces = self.pieces, [""]
         self.subnode_parse(node)
-        dont_end_paragraph = self.pieces[-1:] == ['']
+        dont_end_paragraph = self.pieces[-1:] == [""]
         # Now do the text wrapping:
         width = self.textwidth - self.indent
         wrapped_para = []
-        for line in ''.join(self.pieces).splitlines():
-            keep_markdown_newline = line[-2:] == '  '
+        for line in "".join(self.pieces).splitlines():
+            keep_markdown_newline = line[-2:] == "  "
             w_line = textwrap.wrap(line, width=width, break_long_words=False)
             if w_line == []:
-                w_line = ['']
+                w_line = [""]
             if keep_markdown_newline:
-                w_line[-1] = w_line[-1] + '  '
+                w_line[-1] = w_line[-1] + "  "
             for wl in w_line:
-                wrapped_para.append(wl + '\n')
+                wrapped_para.append(wl + "\n")
         if wrapped_para:
-            if wrapped_para[-1][-3:] != '  \n':
-                wrapped_para[-1] = wrapped_para[-1][:-1] + '  \n'
+            if wrapped_para[-1][-3:] != "  \n":
+                wrapped_para[-1] = wrapped_para[-1][:-1] + "  \n"
             if dont_end_paragraph:
-                wrapped_para.append('')
+                wrapped_para.append("")
         pieces.extend(wrapped_para)
         self.pieces = pieces

-# MARK: List tag handlers
+    # MARK: List tag handlers
     def do_itemizedlist(self, node):
-        if self.listitem == '':
+        if self.listitem == "":
             self.start_new_paragraph()
-        elif self.pieces != [] and self.pieces[-1:] != ['']:
-            self.add_text('\n')
+        elif self.pieces != [] and self.pieces[-1:] != [""]:
+            self.add_text("\n")
         listitem = self.listitem
-        if self.listitem in ['*', '-']:
-            self.listitem = '-'
+        if self.listitem in ["*", "-"]:
+            self.listitem = "-"
         else:
-            self.listitem = '*'
+            self.listitem = "*"
         self.subnode_parse(node)
         self.listitem = listitem

     def do_orderedlist(self, node):
-        if self.listitem == '':
+        if self.listitem == "":
             self.start_new_paragraph()
-        elif self.pieces != [] and self.pieces[-1:] != ['']:
-            self.add_text('\n')
+        elif self.pieces != [] and self.pieces[-1:] != [""]:
+            self.add_text("\n")
         listitem = self.listitem
         self.listitem = 0
         self.subnode_parse(node)
@@ -578,96 +633,107 @@

     def do_listitem(self, node):
         try:
             self.listitem = int(self.listitem) + 1
-            item = str(self.listitem) + '. '
+            item = str(self.listitem) + ". "
" except: - item = str(self.listitem) + ' ' + item = str(self.listitem) + " " self.subnode_parse(node, item, indent=4) -# MARK: Parameter list tag handlers + # MARK: Parameter list tag handlers def do_parameterlist(self, node): self.start_new_paragraph() - text = 'unknown' + text = "unknown" for key, val in node.attributes.items(): - if key == 'kind': - if val == 'param': - text = 'Parameters' - elif val == 'exception': - text = 'Exceptions' - elif val == 'retval': - text = 'Returns' + if key == "kind": + if val == "param": + text = "Parameters" + elif val == "exception": + text = "Exceptions" + elif val == "retval": + text = "Returns" else: text = val break if self.indent == 0: - self.add_text([text, '\n', len(text) * '-', '\n']) + self.add_text([text, "\n", len(text) * "-", "\n"]) else: - self.add_text([text, ': \n']) + self.add_text([text, ": \n"]) self.subnode_parse(node) def do_parameteritem(self, node): - self.subnode_parse(node, pieces=['* ', '']) + self.subnode_parse(node, pieces=["* ", ""]) def do_parameternamelist(self, node): self.subnode_parse(node) - self.add_text([' :', ' \n']) - + self.add_text([" :", " \n"]) + def do_parametername(self, node): - if self.pieces != [] and self.pieces != ['* ', '']: - self.add_text(', ') + if self.pieces != [] and self.pieces != ["* ", ""]: + self.add_text(", ") data = self.extract_text(node) - self.add_text(['`', data, '`']) + self.add_text(["`", data, "`"]) def do_parameterdescription(self, node): - self.subnode_parse(node, pieces=[''], indent=4) + self.subnode_parse(node, pieces=[""], indent=4) -# MARK: Section tag handler + # MARK: Section tag handler def do_simplesect(self, node): - kind = node.attributes['kind'].value - if kind in ('date', 'rcs', 'version'): + kind = node.attributes["kind"].value + if kind in ("date", "rcs", "version"): return self.start_new_paragraph() - if kind == 'warning': - self.subnode_parse(node, pieces=['**Warning**: ',''], indent=4) - elif kind == 'see': - self.subnode_parse(node, pieces=['See also: ',''], indent=4) - elif kind == 'return': + if kind == "warning": + self.subnode_parse(node, pieces=["**Warning**: ", ""], indent=4) + elif kind == "see": + self.subnode_parse(node, pieces=["See also: ", ""], indent=4) + elif kind == "return": if self.indent == 0: - pieces = ['Returns', '\n', len('Returns') * '-', '\n', ''] + pieces = ["Returns", "\n", len("Returns") * "-", "\n", ""] else: - pieces = ['Returns:', '\n', ''] + pieces = ["Returns:", "\n", ""] self.subnode_parse(node, pieces=pieces) else: - self.subnode_parse(node, pieces=[kind + ': ',''], indent=4) + self.subnode_parse(node, pieces=[kind + ": ", ""], indent=4) -# MARK: %feature("docstring") producing tag handlers + # MARK: %feature("docstring") producing tag handlers def do_compounddef(self, node): """This produces %feature("docstring") entries for classes, and handles - class, namespace and file memberdef entries specially to allow for + class, namespace and file memberdef entries specially to allow for overloaded functions. For other cases, passes parsing on to standard handlers (which may produce unexpected results). 
""" - kind = node.attributes['kind'].value - if kind in ('class', 'struct'): - prot = node.attributes['prot'].value - if prot != 'public': + kind = node.attributes["kind"].value + if kind in ("class", "struct"): + prot = node.attributes["prot"].value + if prot != "public": return - self.add_text('\n\n') - classdefn = self.extract_text(self.get_specific_subnodes(node, 'compoundname')) - classname = classdefn.split('::')[-1] + self.add_text("\n\n") + classdefn = self.extract_text( + self.get_specific_subnodes(node, "compoundname") + ) + classname = classdefn.split("::")[-1] self.add_text('%%feature("docstring") %s "\n' % classdefn) if self.with_constructor_list: constructor_nodes = [] - for n in self.get_specific_subnodes(node, 'memberdef', recursive=2): - if n.attributes['prot'].value == 'public': - if self.extract_text(self.get_specific_subnodes(n, 'definition')) == classdefn + '::' + classname: + for n in self.get_specific_subnodes( + node, "memberdef", recursive=2 + ): + if n.attributes["prot"].value == "public": + if ( + self.extract_text( + self.get_specific_subnodes(n, "definition") + ) + == classdefn + "::" + classname + ): constructor_nodes.append(n) for n in constructor_nodes: - self.add_line_with_subsequent_indent(self.get_function_signature(n)) + self.add_line_with_subsequent_indent( + self.get_function_signature(n) + ) - names = ('briefdescription','detaileddescription') + names = ("briefdescription", "detaileddescription") sub_dict = self.get_specific_nodes(node, names) - for n in ('briefdescription','detaileddescription'): + for n in ("briefdescription", "detaileddescription"): if n in sub_dict: self.parse(sub_dict[n]) if self.with_constructor_list: @@ -675,62 +741,67 @@ def do_compounddef(self, node): if self.with_attribute_list: self.make_attribute_list(node) - sub_list = self.get_specific_subnodes(node, 'includes') + sub_list = self.get_specific_subnodes(node, "includes") if sub_list: self.parse(sub_list[0]) - self.add_text(['";', '\n']) - - names = ['compoundname', 'briefdescription','detaileddescription', 'includes'] - self.subnode_parse(node, ignore = names) - - elif kind in ('file', 'namespace'): - nodes = node.getElementsByTagName('sectiondef') + self.add_text(['";', "\n"]) + + names = [ + "compoundname", + "briefdescription", + "detaileddescription", + "includes", + ] + self.subnode_parse(node, ignore=names) + + elif kind in ("file", "namespace"): + nodes = node.getElementsByTagName("sectiondef") for n in nodes: self.parse(n) # now explicitely handle possibly overloaded member functions. - if kind in ['class', 'struct','file', 'namespace']: + if kind in ["class", "struct", "file", "namespace"]: md_nodes = self.get_memberdef_nodes_and_signatures(node, kind) for sig in md_nodes: self.handle_typical_memberdefs(sig, md_nodes[sig]) - + def do_memberdef(self, node): """Handle cases outside of class, struct, file or namespace. These are now dealt with by `handle_overloaded_memberfunction`. Do these even exist??? 
""" - prot = node.attributes['prot'].value - id = node.attributes['id'].value - kind = node.attributes['kind'].value + prot = node.attributes["prot"].value + id = node.attributes["id"].value + kind = node.attributes["kind"].value tmp = node.parentNode.parentNode.parentNode - compdef = tmp.getElementsByTagName('compounddef')[0] - cdef_kind = compdef.attributes['kind'].value - if cdef_kind in ('file', 'namespace', 'class', 'struct'): + compdef = tmp.getElementsByTagName("compounddef")[0] + cdef_kind = compdef.attributes["kind"].value + if cdef_kind in ("file", "namespace", "class", "struct"): # These cases are now handled by `handle_typical_memberdefs` return - if prot != 'public': + if prot != "public": return - first = self.get_specific_nodes(node, ('definition', 'name')) - name = self.extract_text(first['name']) - if name[:8] == 'operator': # Don't handle operators yet. + first = self.get_specific_nodes(node, ("definition", "name")) + name = self.extract_text(first["name"]) + if name[:8] == "operator": # Don't handle operators yet. return - if not 'definition' in first or kind in ['variable', 'typedef']: + if not "definition" in first or kind in ["variable", "typedef"]: return - data = self.extract_text(first['definition']) - self.add_text('\n') - self.add_text(['/* where did this entry come from??? */', '\n']) + data = self.extract_text(first["definition"]) + self.add_text("\n") + self.add_text(["/* where did this entry come from??? */", "\n"]) self.add_text('%feature("docstring") %s "\n%s' % (data, data)) for n in node.childNodes: if n not in first.values(): self.parse(n) - self.add_text(['";', '\n']) - -# MARK: Entry tag handlers (dont print anything meaningful) + self.add_text(['";', "\n"]) + + # MARK: Entry tag handlers (dont print anything meaningful) def do_sectiondef(self, node): - kind = node.attributes['kind'].value - if kind in ('public-func', 'func', 'user-defined', ''): + kind = node.attributes["kind"].value + if kind in ("public-func", "func", "user-defined", ""): self.subnode_parse(node) def do_header(self, node): @@ -738,7 +809,7 @@ def do_header(self, node): which should not be printed as such, so we comment it in the output.""" data = self.extract_text(node) - self.add_text('\n/*\n %s \n*/\n' % data) + self.add_text("\n/*\n %s \n*/\n" % data) # If our immediate sibling is a 'description' node then we # should comment that out also and remove it from the parent # node's children. 
@@ -746,93 +817,121 @@
         idx = parent.childNodes.index(node)
         if len(parent.childNodes) >= idx + 2:
             nd = parent.childNodes[idx + 2]
-            if nd.nodeName == 'description':
+            if nd.nodeName == "description":
                 nd = parent.removeChild(nd)
-                self.add_text('\n/*')
+                self.add_text("\n/*")
                 self.subnode_parse(nd)
-                self.add_text('\n*/\n')
+                self.add_text("\n*/\n")

     def do_member(self, node):
-        kind = node.attributes['kind'].value
-        refid = node.attributes['refid'].value
-        if kind == 'function' and refid[:9] == 'namespace':
+        kind = node.attributes["kind"].value
+        refid = node.attributes["refid"].value
+        if kind == "function" and refid[:9] == "namespace":
             self.subnode_parse(node)

     def do_doxygenindex(self, node):
         self.multi = 1
-        comps = node.getElementsByTagName('compound')
+        comps = node.getElementsByTagName("compound")
         for c in comps:
-            refid = c.attributes['refid'].value
-            fname = refid + '.xml'
+            refid = c.attributes["refid"].value
+            fname = refid + ".xml"
             if not os.path.exists(fname):
-                fname = os.path.join(self.my_dir,  fname)
+                fname = os.path.join(self.my_dir, fname)
             if not self.quiet:
                 print("parsing file: %s" % fname)
-            p = Doxy2SWIG(fname,
-                          with_function_signature = self.with_function_signature,
-                          with_type_info = self.with_type_info,
-                          with_constructor_list = self.with_constructor_list,
-                          with_attribute_list = self.with_attribute_list,
-                          with_overloaded_functions = self.with_overloaded_functions,
-                          textwidth = self.textwidth,
-                          quiet = self.quiet)
+            p = Doxy2SWIG(
+                fname,
+                with_function_signature=self.with_function_signature,
+                with_type_info=self.with_type_info,
+                with_constructor_list=self.with_constructor_list,
+                with_attribute_list=self.with_attribute_list,
+                with_overloaded_functions=self.with_overloaded_functions,
+                textwidth=self.textwidth,
+                quiet=self.quiet,
+            )
             p.generate()
             self.pieces.extend(p.pieces)

+
 # MARK: main
 def main():
     usage = __doc__
     parser = optparse.OptionParser(usage)
-    parser.add_option("-f", '--function-signature',
-                      action='store_true',
-                      default=False,
-                      dest='f',
-                      help='include function signature in the documentation. This is handy when not using swig auto-generated function definitions %feature("autodoc", [0,1])')
-    parser.add_option("-t", '--type-info',
-                      action='store_true',
-                      default=False,
-                      dest='t',
-                      help='include type information for arguments in function signatures. This is similar to swig autodoc level 1')
-    parser.add_option("-c", '--constructor-list',
-                      action='store_true',
-                      default=False,
-                      dest='c',
-                      help='generate a constructor list for class documentation. Useful for target languages where the object construction should be documented in the class documentation.')
-    parser.add_option("-a", '--attribute-list',
-                      action='store_true',
-                      default=False,
-                      dest='a',
-                      help='generate an attributes list for class documentation. Useful for target languages where class attributes should be documented in the class documentation.')
-    parser.add_option("-o", '--overloaded-functions',
-                      action='store_true',
-                      default=False,
-                      dest='o',
-                      help='collect all documentation for overloaded functions. Useful for target languages that have no concept of overloaded functions, but also to avoid having to attach the correct docstring to each function overload manually')
-    parser.add_option("-w", '--width', type="int",
-                      action='store',
-                      dest='w',
-                      default=80,
-                      help='textwidth for wrapping (default: 80). Note that the generated lines may include 2 additional spaces (for markdown).')
-    parser.add_option("-q", '--quiet',
-                      action='store_true',
-                      default=False,
-                      dest='q',
-                      help='be quiet and minimize output')
-
+    parser.add_option(
+        "-f",
+        "--function-signature",
+        action="store_true",
+        default=False,
+        dest="f",
+        help='include function signature in the documentation. This is handy when not using swig auto-generated function definitions %feature("autodoc", [0,1])',
+    )
+    parser.add_option(
+        "-t",
+        "--type-info",
+        action="store_true",
+        default=False,
+        dest="t",
+        help="include type information for arguments in function signatures. This is similar to swig autodoc level 1",
+    )
+    parser.add_option(
+        "-c",
+        "--constructor-list",
+        action="store_true",
+        default=False,
+        dest="c",
+        help="generate a constructor list for class documentation. Useful for target languages where the object construction should be documented in the class documentation.",
+    )
+    parser.add_option(
+        "-a",
+        "--attribute-list",
+        action="store_true",
+        default=False,
+        dest="a",
+        help="generate an attributes list for class documentation. Useful for target languages where class attributes should be documented in the class documentation.",
+    )
+    parser.add_option(
+        "-o",
+        "--overloaded-functions",
+        action="store_true",
+        default=False,
+        dest="o",
+        help="collect all documentation for overloaded functions. Useful for target languages that have no concept of overloaded functions, but also to avoid having to attach the correct docstring to each function overload manually",
+    )
+    parser.add_option(
+        "-w",
+        "--width",
+        type="int",
+        action="store",
+        dest="w",
+        default=80,
+        help="textwidth for wrapping (default: 80). Note that the generated lines may include 2 additional spaces (for markdown).",
+    )
+    parser.add_option(
+        "-q",
+        "--quiet",
+        action="store_true",
+        default=False,
+        dest="q",
+        help="be quiet and minimize output",
+    )
+
     options, args = parser.parse_args()
     if len(args) != 2:
         parser.error("no input and output specified")
-
-    p = Doxy2SWIG(args[0],
-                  with_function_signature = options.f,
-                  with_type_info = options.t,
-                  with_constructor_list = options.c,
-                  with_attribute_list = options.a,
-                  with_overloaded_functions = options.o,
-                  textwidth = options.w,
-                  quiet = options.q)
+
+    p = Doxy2SWIG(
+        args[0],
+        with_function_signature=options.f,
+        with_type_info=options.t,
+        with_constructor_list=options.c,
+        with_attribute_list=options.a,
+        with_overloaded_functions=options.o,
+        textwidth=options.w,
+        quiet=options.q,
+    )
     p.generate()
     p.write(args[1])

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
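Besides the command-line interface above, the converter class can also be
driven programmatically. A minimal sketch (the file names are illustrative;
the constructor arguments are the ones shown in the diff):

    from doxy2swig import Doxy2SWIG

    d = Doxy2SWIG(
        "xml/index.xml",          # Doxygen XML output (assumed path)
        with_constructor_list=True,
        with_attribute_list=True,
        quiet=True,
    )
    d.generate()
    d.write("docstrings.i")       # %feature("docstring") directives for SWIG
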
""" - cls = cls if cls is not None else classfactory( - obj.__class__, - meta=meta, - url=url, - storage=storage, - id=id, + cls = ( + cls + if cls is not None + else classfactory( + obj.__class__, + meta=meta, + url=url, + storage=storage, + id=id, + ) ) new = copy.deepcopy(obj) if deepcopy else copy.copy(obj) new.__class__ = cls diff --git a/bindings/python/mappings.py b/bindings/python/mappings.py index 529518111..b28cd3221 100644 --- a/bindings/python/mappings.py +++ b/bindings/python/mappings.py @@ -55,6 +55,7 @@ class MissingRelationError(MappingError): class StepType(Enum): """Type of mapping step when going from the output to the inputs.""" + MAPSTO = 1 INV_MAPSTO = -1 INSTANCEOF = 2 @@ -75,10 +76,11 @@ class Value: instance of. cost: Cost of accessing this value. """ + def __init__( self, value: "Any", - unit: "Optional[str]" =None, + unit: "Optional[str]" = None, iri: "Optional[str]" = None, property_iri: "Optional[str]" = None, cost: "Any | Callable" = 0.0, @@ -136,6 +138,7 @@ class MappingStep: The arguments can also be assigned as attributes. """ + def __init__( self, output_iri: "Optional[str]" = None, @@ -213,14 +216,14 @@ def eval( """ if routeno is None: - (_, routeno), = self.lowest_costs(nresults=1) + ((_, routeno),) = self.lowest_costs(nresults=1) inputs, idx = self.get_inputs(routeno) values = get_values(inputs, idx, quantity=quantity) if self.function: value = self.function(**values) elif len(values) == 1: - value, = values.values() + (value,) = values.values() else: raise TypeError( f"Expected inputs to be a single argument: {values}" @@ -267,7 +270,9 @@ def get_input_iris(self, routeno: int) -> dict[str, str]: """ inputs, _ = self.get_inputs(routeno) return { - key: value.output_iri if isinstance(value, MappingStep) else value.iri + key: value.output_iri + if isinstance(value, MappingStep) + else value.iri for key, value in inputs.items() } @@ -299,7 +304,6 @@ def lowest_costs(self, nresults: int = 5) -> list: # Loop over all toplevel routes leading into this mapping step for inputs in self.input_routes: - # For each route, loop over all input arguments of this step # The number of permutations we must consider is the product # of the total number of routes to each input argument. @@ -314,15 +318,23 @@ def lowest_costs(self, nresults: int = 5) -> list: # store them in an array with two columns: `cost` and `routeno`. # The `results` list is extended with the cost array # for each toplevel route leading into this step. 
-            base = np.rec.fromrecords([(0.0, 0)], names='cost,routeno',
-                                      formats='f8,i8')
+            base = np.rec.fromrecords(
+                [(0.0, 0)], names="cost,routeno", formats="f8,i8"
+            )
             m = 1
             for input in inputs.values():
                 if isinstance(input, MappingStep):
                     nroutes = input.number_of_routes()
-                    res = np.rec.fromrecords([row for row in sorted(
-                        input.lowest_costs(nresults=nresults),
-                        key=lambda x: x[1])], dtype=base.dtype)
+                    res = np.rec.fromrecords(
+                        [
+                            row
+                            for row in sorted(
+                                input.lowest_costs(nresults=nresults),
+                                key=lambda x: x[1],
+                            )
+                        ],
+                        dtype=base.dtype,
+                    )
                     res1 = res.repeat(len(base))
                     base = np.tile(base, len(res))
                     base.cost += res1.cost
@@ -377,7 +389,10 @@ def show(
         res = []
         ind = " " * indent
         res.append(ind + f"{name if name else 'Step'}:")
-        res.append(ind + f"  steptype: {self.steptype.name if self.steptype else None}")
+        res.append(
+            ind
+            + f"  steptype: {self.steptype.name if self.steptype else None}"
+        )
         res.append(ind + f"  output_iri: {self.output_iri}")
         res.append(ind + f"  output_unit: {self.output_unit}")
         res.append(ind + f"  cost: {self.cost}")
@@ -451,7 +466,9 @@ def get_values(
             value = input_value.eval(routeno=routeno, quantity=quantity)
             values[key] = (
                 value.to(input_value.output_unit)
-                if input_value.output_unit and isinstance(input_value, quantity) else value
+                if input_value.output_unit
+                and isinstance(input_value, quantity)
+                else value
             )
         elif isinstance(input_value, Value):
             values[key] = quantity(input_value.value, input_value.unit)
@@ -525,25 +542,25 @@ def fno_mapper(triplestore: "Triplestore") -> defaultdict:


 def mapping_route(
-        target: str,
-        sources: dict,
-        triplestore: "Triplestore",
-        function_repo: "Optional[dict[str, Callable]]" = None,
-        function_mappers: "Sequence[Callable]" = (fno_mapper, ),
-        default_costs: dict[str, float] = {
-            "function": 10.0,
-            "mapsTo": 2.0,
-            "instanceOf": 1.0,
-            "subClassOf": 1.0,
-            "value": 0.0,
-        },
-        mapsTo: str = MAP.mapsTo,
-        instanceOf: str = DM.instanceOf,
-        subClassOf: str = RDFS.subClassOf,
-        #description: str = DCTERMS.description,
-        label: str = RDFS.label,
-        hasUnit: str = DM.hasUnit,
-        hasCost: str = DM.hasCost, # TODO - add hasCost to the DM ontology
+    target: str,
+    sources: dict,
+    triplestore: "Triplestore",
+    function_repo: "Optional[dict[str, Callable]]" = None,
+    function_mappers: "Sequence[Callable]" = (fno_mapper,),
+    default_costs: dict[str, float] = {
+        "function": 10.0,
+        "mapsTo": 2.0,
+        "instanceOf": 1.0,
+        "subClassOf": 1.0,
+        "value": 0.0,
+    },
+    mapsTo: str = MAP.mapsTo,
+    instanceOf: str = DM.instanceOf,
+    subClassOf: str = RDFS.subClassOf,
+    # description: str = DCTERMS.description,
+    label: str = RDFS.label,
+    hasUnit: str = DM.hasUnit,
+    hasCost: str = DM.hasCost,  # TODO - add hasCost to the DM ontology
 ) -> MappingStep:
     """Find routes of mappings from any source in `sources` to `target`.

@@ -588,11 +605,11 @@ def mapping_route(

     # Create lookup tables for fast access to properties
     # This only traverses `triples` once
     soMaps = defaultdict(list)  # (s, mapsTo, o) ==> soMaps[s] -> [o, ..]
     osMaps = defaultdict(list)  # (o, mapsTo, s) ==> osMaps[o] -> [s, ..]
     osSubcl = defaultdict(list)  # (o, subClassOf, s) ==> osSubcl[o] -> [s, ..]
-    soInst = {}                 # (s, instanceOf, o) ==> soInst[s] -> o
-    osInst = defaultdict(list)  # (o, instanceOf, s) ==> osInst[o] -> [s, ..]
+    soInst = {}  # (s, instanceOf, o) ==> soInst[s] -> o
+    osInst = defaultdict(list)  # (o, instanceOf, s) ==> osInst[o] -> [s, ..]
     for s, o in triplestore.subject_objects(mapsTo):
         soMaps[s].append(o)
         osMaps[o].append(s)
@@ -620,7 +637,8 @@ def getcost(target, stepname):

     def walk(target, visited, step):
         """Walk backward in rdf graph from `target` to sources."""
-        if target in visited: return
+        if target in visited:
+            return
         visited.add(target)

         def addnode(node, steptype, stepname):
@@ -629,37 +647,43 @@ def addnode(node, steptype, stepname):
             step.steptype = steptype
             step.cost = getcost(target, stepname)
             if node in sources:
-                value = Value(value=sources[node], unit=soUnit.get(node),
-                              iri=node, property_iri=soInst.get(node),
-                              cost=getcost(node, 'value'))
+                value = Value(
+                    value=sources[node],
+                    unit=soUnit.get(node),
+                    iri=node,
+                    property_iri=soInst.get(node),
+                    cost=getcost(node, "value"),
+                )
                 step.add_input(value, name=soName.get(node))
             else:
-                prevstep = MappingStep(output_iri=node,
-                                       output_unit=soUnit.get(node))
+                prevstep = MappingStep(
+                    output_iri=node, output_unit=soUnit.get(node)
+                )
                 step.add_input(prevstep, name=soName.get(node))
                 walk(node, visited, prevstep)

         for node in osInst[target]:
-            addnode(node, StepType.INV_INSTANCEOF, 'instanceOf')
+            addnode(node, StepType.INV_INSTANCEOF, "instanceOf")

         for node in soMaps[target]:
-            addnode(node, StepType.MAPSTO, 'mapsTo')
+            addnode(node, StepType.MAPSTO, "mapsTo")

         for node in osMaps[target]:
             addnode(node, StepType.INV_MAPSTO, "mapsTo")

         for node in osSubcl[target]:
-            addnode(node, StepType.INV_SUBCLASSOF, 'subClassOf')
+            addnode(node, StepType.INV_SUBCLASSOF, "subClassOf")

         for fmap in function_mappers:
             for func, input_iris in fmap(triplestore)[target]:
                 step.steptype = StepType.FUNCTION
-                step.cost = getcost(func, 'function')
+                step.cost = getcost(func, "function")
                 step.function = function_repo[func]
                 step.join_mode = True
                 for i, input_iri in enumerate(input_iris):
-                    step0 = MappingStep(output_iri=input_iri,
-                                        output_unit=soUnit.get(input_iri))
+                    step0 = MappingStep(
+                        output_iri=input_iri, output_unit=soUnit.get(input_iri)
+                    )
                     step.add_input(step0, name=soName.get(input_iri))
                     walk(input_iri, visited, step0)
                 step.join_input()
@@ -672,7 +696,7 @@ def addnode(node, steptype, stepname):
         visited.add(target)  # do we really want this?
         source = soInst[target]
         step.steptype = StepType.INSTANCEOF
-        step.cost = getcost(source, 'instanceOf')
+        step.cost = getcost(source, "instanceOf")
         step0 = MappingStep(output_iri=source, output_unit=soUnit.get(source))
         step.add_input(step0, name=soName.get(target))
         step = step0
@@ -716,13 +740,15 @@ def instance_routes(

     sources = {}
     for inst in instances:
-        props = {prop.name: prop for prop in inst.meta['properties']}
+        props = {prop.name: prop for prop in inst.meta["properties"]}
         for key, value in inst.properties.items():
-            sources[f'{inst.meta.uri}#{key}'] = quantity(value, props[key].unit)
+            sources[f"{inst.meta.uri}#{key}"] = quantity(
+                value, props[key].unit
+            )

     routes = {}
-    for prop in meta['properties']:
-        target = f'{meta.uri}#{prop.name}'
+    for prop in meta["properties"]:
+        target = f"{meta.uri}#{prop.name}"
         try:
             route = mapping_route(target, sources, triplestore, **kwargs)
         except MissingRelationError:
@@ -730,7 +756,7 @@ def instance_routes(
                 continue
             raise
         if not allow_incomplete and not route.number_of_routes():
-            raise InsufficientMappingError(f'No mappings for {target}')
+            raise InsufficientMappingError(f"No mappings for {target}")
         routes[prop.name] = route

     return routes
@@ -767,7 +793,7 @@ def instantiate_from_routes(
     routedict = routedict or {}

     values = {}
-    for prop in meta['properties']:
+    for prop in meta["properties"]:
         if prop.name in routes:
             step = routes[prop.name]
             values[prop.name] = step.eval(
@@ -882,10 +908,10 @@ def instantiate_all(
         triplestore=triplestore,
         allow_incomplete=allow_incomplete,
         quantity=quantity,
-        **kwargs
+        **kwargs,
     )

-    property_names = [prop.name for prop in meta.properties['properties']]
+    property_names = [prop.name for prop in meta.properties["properties"]]

     def routedicts(n: int) -> "Generator[dict[str, int], None, None]":
         """Recursive helper function returning an iterator over all possible
@@ -893,7 +919,7 @@ def routedicts(n: int) -> "Generator[dict[str, int], None, None]":
         if n < 0:
             yield {}
             return
-        for outer in routedicts(n-1):
+        for outer in routedicts(n - 1):
             name = property_names[n]
             if routedict and name in routedict:
                 outer[name] = routedict[name]
@@ -906,15 +932,13 @@ def routedicts(n: int) -> "Generator[dict[str, int], None, None]":

     for route_dict in routedicts(len(property_names) - 1):
         yield instantiate_from_routes(
-            meta=meta,
-            routes=routes,
-            routedict=route_dict,
-            quantity=quantity
+            meta=meta, routes=routes, routedict=route_dict, quantity=quantity
         )


 # ------------- Old implementation -----------------

+
 def unitconvert_pint(dest_unit: "Any", value: "Any", unit: "Any") -> "Any":
     """Returns `value` converted to `dest_unit`.
@@ -928,6 +952,7 @@ def unitconvert_pint(dest_unit: "Any", value: "Any", unit: "Any") -> "Any": """ import pint + ureg = pint.UnitRegistry() u1 = ureg(unit) u2 = ureg(dest_unit) @@ -969,21 +994,25 @@ def match_factory( """ def match( - s: "Optional[str]" = None, p: "Optional[str]" = None, o: "Optional[str]" = None + s: "Optional[str]" = None, + p: "Optional[str]" = None, + o: "Optional[str]" = None, ) -> "Generator[tuple[str, str, str], None, None]": """Returns generator over all triples that matches (s, p, o).""" return ( - triple for triple in triples + triple + for triple in triples if ( - (s is None or triple[0] == s) and - (p is None or triple[1] == p) and - (o is None or triple[2] == o) + (s is None or triple[0] == s) + and (p is None or triple[1] == p) + and (o is None or triple[2] == o) ) ) if match_first: return lambda s=None, p=None, o=None: next( - iter(match(s, p, o) or ()), (None, None, None)) + iter(match(s, p, o) or ()), (None, None, None) + ) return match @@ -1002,11 +1031,13 @@ def assign_dimensions( propname: Source property name. """ - lst = [prop.dims for prop in inst.meta['properties'] if prop.name == propname] + lst = [ + prop.dims for prop in inst.meta["properties"] if prop.name == propname + ] if not lst: raise MappingError(f"Unexpected property name: {propname}") - src_dims, = lst + (src_dims,) = lst for dim in src_dims: if dim not in dims: raise InconsistentDimensionError(f"Unexpected dimension: {dim}") @@ -1027,7 +1058,7 @@ def make_instance( mappings: "Sequence[tuple[str, str, str]]" = (), strict: bool = True, allow_incomplete: bool = False, - mapsTo: str = ':mapsTo', + mapsTo: str = ":mapsTo", ) -> dlite.Instance: """Create an instance of `meta` using data found in `*instances`. @@ -1080,15 +1111,15 @@ def make_instance( if isinstance(instances, dlite.Instance): instances = [instances] - dims = {dim.name: None for dim in meta['dimensions']} + dims = {dim.name: None for dim in meta["dimensions"]} props = {} - for prop in meta['properties']: - prop_uri = f'{meta.uri}#{prop.name}' + for prop in meta["properties"]: + prop_uri = f"{meta.uri}#{prop.name}" for _, _, o in match(prop_uri, mapsTo, None): for inst in instances: - for prop2 in inst.meta['properties']: - prop2_uri = f'{inst.meta.uri}#{prop2.name}' + for prop2 in inst.meta["properties"]: + prop2_uri = f"{inst.meta.uri}#{prop2.name}" for _ in match(prop2_uri, mapsTo, o): value = inst[prop2.name] if prop.name not in props: diff --git a/bindings/python/options.py b/bindings/python/options.py index f90e165c4..b12a96c4e 100644 --- a/bindings/python/options.py +++ b/bindings/python/options.py @@ -16,25 +16,26 @@ class Options(dict): Options may also be accessed as attributes. 
""" + def __init__(self, options, defaults=None): dict.__init__(self) if options is None: - options = '' - options = options.split('#')[0] # strip hash and everything following + options = "" + options = options.split("#")[0] # strip hash and everything following if isinstance(defaults, str): defaults = Options(defaults) if defaults: self.update(defaults) - if ';' in options: - tokens = options.split(';') - elif '&' in options: - tokens = options.split('&') + if ";" in options: + tokens = options.split(";") + elif "&" in options: + tokens = options.split("&") else: tokens = [options] - if tokens and tokens != ['']: - self.update([t.split('=', 1) for t in tokens]) + if tokens and tokens != [""]: + self.update([t.split("=", 1) for t in tokens]) def __getattr__(self, name): if name in self: diff --git a/bindings/python/rdf.py b/bindings/python/rdf.py index a68a9ce70..677db4791 100644 --- a/bindings/python/rdf.py +++ b/bindings/python/rdf.py @@ -43,8 +43,9 @@ def _get_uri(inst, base_uri): def _value(graph, subject=None, predicate=None, object=None, **kwargs): """Wrapper around rdflib.Graph.value() that raises an exception if the value is missing.""" - value = graph.value(subject=subject, predicate=predicate, object=object, - **kwargs) + value = graph.value( + subject=subject, predicate=predicate, object=object, **kwargs + ) if not value: raise ValueError( f"missing value for subject={subject}, predicate={predicate}, " @@ -58,7 +59,9 @@ def _ref(value): return URIRef(value) if _is_valid_uri(value) else Literal(value) -def to_graph(inst, graph=None, base_uri='', base_prefix=None, include_meta=None): +def to_graph( + inst, graph=None, base_uri="", base_prefix=None, include_meta=None +): """Serialise DLite instance to a rdflib Graph object. Arguments: @@ -82,7 +85,8 @@ def to_graph(inst, graph=None, base_uri='', base_prefix=None, include_meta=None) if base_uri and base_uri[-1] not in "#/": base_uri += "/" if base_prefix is not None and base_uri not in [ - str(v) for _, v in graph.namespaces()]: + str(v) for _, v in graph.namespaces() + ]: graph.bind(base_prefix, base_uri) this = URIRef(_get_uri(inst, base_uri)) @@ -105,23 +109,35 @@ def to_graph(inst, graph=None, base_uri='', base_prefix=None, include_meta=None) # Add type (PropertyInstance)? 
graph.add((prop, DM.instanceOf, URIRef(f"{inst.meta.uri}#{k}"))) graph.add((prop, DM.hasLabel, Literal(k))) - graph.add((prop, DM.hasValue, - Literal(inst.get_property_as_string(k, flags=1)))) + graph.add( + ( + prop, + DM.hasValue, + Literal(inst.get_property_as_string(k, flags=1)), + ) + ) else: - graph.add((this, RDF.type, - DM.Entity if inst.meta.uri == dlite.ENTITY_SCHEMA - else DM.Metadata)) + graph.add( + ( + this, + RDF.type, + DM.Entity + if inst.meta.uri == dlite.ENTITY_SCHEMA + else DM.Metadata, + ) + ) if inst.description: - graph.add((this, DM.hasDescription, - Literal(inst.description, lang="en"))) + graph.add( + (this, DM.hasDescription, Literal(inst.description, lang="en")) + ) for d in inst.properties["dimensions"]: dim = URIRef(this + sep + d.name) graph.add((this, DM.hasDimension, dim)) graph.add((dim, RDF.type, DM.Dimension)) - graph.add((dim, DM.hasDescription, - Literal(d.description, lang="en"))) - graph.add((dim, DM.hasLabel, - Literal(d.name, lang="en"))) + graph.add( + (dim, DM.hasDescription, Literal(d.description, lang="en")) + ) + graph.add((dim, DM.hasLabel, Literal(d.name, lang="en"))) for p in inst.properties["properties"]: prop = URIRef(this + sep + p.name) graph.add((this, DM.hasProperty, prop)) @@ -147,8 +163,16 @@ def to_graph(inst, graph=None, base_uri='', base_prefix=None, include_meta=None) return graph -def to_rdf(inst, destination=None, format="turtle", base_uri='', - base_prefix=None, include_meta=None, decode=True, **kwargs): +def to_rdf( + inst, + destination=None, + format="turtle", + base_uri="", + base_prefix=None, + include_meta=None, + decode=True, + **kwargs, +): """Serialise DLite instance to string. Arguments: @@ -170,8 +194,12 @@ def to_rdf(inst, destination=None, format="turtle", base_uri='', The serialised instance if `destination is not None. If `decode` is true, a string is returned, otherwise a bytes object. 
""" - graph = to_graph(inst, base_uri=base_uri, base_prefix=base_prefix, - include_meta=include_meta) + graph = to_graph( + inst, + base_uri=base_uri, + base_prefix=base_prefix, + include_meta=include_meta, + ) if isinstance(destination, pathlib.PurePath): destination = str(destination) s = graph.serialize(destination=destination, format=format, **kwargs) @@ -203,7 +231,8 @@ def from_graph(graph, id=None): pass else: raise ValueError( - "id must be given when graph has move than one entity") + "id must be given when graph has move than one entity" + ) elif _is_valid_uri(id): rdfid = URIRef(id) else: @@ -219,7 +248,8 @@ def from_graph(graph, id=None): rdfid = v uuid = id dlite_id = ( - rdfid.split("#", 1)[-1] if "#" in rdfid + rdfid.split("#", 1)[-1] + if "#" in rdfid else rdfid.rsplit("/", 1)[-1] ) rdfid = URIRef(rdfid) @@ -229,8 +259,10 @@ def from_graph(graph, id=None): if uuid: if dlite.get_uuid(dlite_id) != str(uuid): if dlite.get_uuid(dlite_id) != uuid: - raise ValueError(f"provided id \"{id}\" does not correspond " - f"to uuid \"{uuid}\"") + raise ValueError( + f'provided id "{id}" does not correspond ' + f'to uuid "{uuid}"' + ) else: uuid = dlite.get_uuid(dlite_id) @@ -279,9 +311,12 @@ def from_graph(graph, id=None): description=graph.value(rdfid, DM.hasDescription), ) else: - dims = {str(_value(graph, dim, DM.hasLabel)): - int(_value(graph, dim, DM.hasValue)) - for dim in dimensions} + dims = { + str(_value(graph, dim, DM.hasLabel)): int( + _value(graph, dim, DM.hasValue) + ) + for dim in dimensions + } inst = dlite.Instance.from_metaid(meta.uri, dims, id=dlite_id) for prop in properties: label = _value(graph, prop, DM.hasLabel) @@ -291,8 +326,16 @@ def from_graph(graph, id=None): return inst -def from_rdf(source=None, location=None, file=None, data=None, - format=None, id=None, publicID=PUBLIC_ID, **kwargs): +def from_rdf( + source=None, + location=None, + file=None, + data=None, + format=None, + id=None, + publicID=PUBLIC_ID, + **kwargs, +): """Instantiate DLite instance from RDF. The source is specified using one of `source`, `location`, `file` or `data`. 
@@ -322,6 +365,13 @@ def from_rdf(source=None, location=None, file=None, data=None, source = str(source) if format is None: format = guess_format(source) - graph.parse(source=source, location=location, file=file, data=data, - format=format, publicID=publicID, **kwargs) + graph.parse( + source=source, + location=location, + file=file, + data=data, + format=format, + publicID=publicID, + **kwargs, + ) return from_graph(graph, id=id) diff --git a/bindings/python/tests/CMakeLists.txt b/bindings/python/tests/CMakeLists.txt index 1d7678413..ec0a357f9 100644 --- a/bindings/python/tests/CMakeLists.txt +++ b/bindings/python/tests/CMakeLists.txt @@ -9,6 +9,7 @@ set(tests test_misc test_python_storage test_storage + test_storage_plugins test_paths test_utils test_global_dlite_state diff --git a/bindings/python/tests/__main__.py b/bindings/python/tests/__main__.py index 760ae2b9a..5f5896a67 100644 --- a/bindings/python/tests/__main__.py +++ b/bindings/python/tests/__main__.py @@ -1,2 +1,3 @@ import test_python_bindings + test_python_bindings.test() diff --git a/bindings/python/tests/check_import.py b/bindings/python/tests/check_import.py index f09f0ee85..8ce062db8 100644 --- a/bindings/python/tests/check_import.py +++ b/bindings/python/tests/check_import.py @@ -1,4 +1,3 @@ - import sys import importlib import warnings @@ -24,7 +23,6 @@ def check_import(module_name, package=None, skip=False, warn=None): if skip: sys.exit(44) # tell CMake to skip the test elif warn or warn is None: - warnings.warn(f'cannot load module: "{module_name}"', - stacklevel=2) + warnings.warn(f'cannot load module: "{module_name}"', stacklevel=2) return None return module diff --git a/bindings/python/tests/global_dlite_state_mod1.py b/bindings/python/tests/global_dlite_state_mod1.py index 9a2b3d38d..23287b924 100644 --- a/bindings/python/tests/global_dlite_state_mod1.py +++ b/bindings/python/tests/global_dlite_state_mod1.py @@ -1,7 +1,7 @@ import importlib import dlite + def assert_exists_in_module(uuid): assert dlite.has_instance(uuid) assert uuid in dlite.istore_get_uuids() - diff --git a/bindings/python/tests/global_dlite_state_mod2.py b/bindings/python/tests/global_dlite_state_mod2.py index a775c2bf2..1476774cb 100644 --- a/bindings/python/tests/global_dlite_state_mod2.py +++ b/bindings/python/tests/global_dlite_state_mod2.py @@ -9,14 +9,14 @@ thisdir = os.path.abspath(os.path.dirname(__file__)) -url = 'json://' + thisdir + '/MyEntity.json' +url = "json://" + thisdir + "/MyEntity.json" # myentity is already defined via test_global_dlite_state, no new instance is added to istore myentity = Instance.from_url(url) assert myentity.uri == "http://onto-ns.com/meta/0.1/MyEntity" assert len(dlite.istore_get_uuids()) == 3 + 3 -i1 = Instance.from_metaid(myentity.uri, [2, 3], 'myid') +i1 = Instance.from_metaid(myentity.uri, [2, 3], "myid") assert i1.uri == "myid" assert i1.uuid in dlite.istore_get_uuids() assert len(dlite.istore_get_uuids()) == 3 + 4 diff --git a/bindings/python/tests/global_dlite_state_mod3.py b/bindings/python/tests/global_dlite_state_mod3.py index 3517b7221..3ec65916a 100644 --- a/bindings/python/tests/global_dlite_state_mod3.py +++ b/bindings/python/tests/global_dlite_state_mod3.py @@ -6,7 +6,5 @@ assert len(dlite.istore_get_uuids()) == 3 + 4 -coll=dlite.Collection() +coll = dlite.Collection() assert len(dlite.istore_get_uuids()) == 3 + 5 - - diff --git a/bindings/python/tests/global_dlite_state_mod4.py b/bindings/python/tests/global_dlite_state_mod4.py index 8e5113d6e..e3b7584a1 100644 --- 
a/bindings/python/tests/global_dlite_state_mod4.py +++ b/bindings/python/tests/global_dlite_state_mod4.py @@ -6,7 +6,5 @@ assert len(dlite.istore_get_uuids()) == 3 + 5 -coll=dlite.Collection() +coll = dlite.Collection() assert len(dlite.istore_get_uuids()) == 3 + 6 - - diff --git a/bindings/python/tests/test_collection.py b/bindings/python/tests/test_collection.py index c54eff10a..e8a55e1d2 100755 --- a/bindings/python/tests/test_collection.py +++ b/bindings/python/tests/test_collection.py @@ -5,57 +5,58 @@ import dlite thisdir = Path(__file__).resolve().parent -outdir = thisdir / 'output' +outdir = thisdir / "output" # Create collection -coll = dlite.Collection('mycoll') +coll = dlite.Collection("mycoll") # Add relations -coll.add_relation('cat', 'is-a', 'animal') -coll.add_relation('dog', 'is-a', 'animal') -rel = coll.get_first_relation('dog') -assert rel.s == 'dog' -rel = coll.get_first_relation(p='is-a') -assert rel.s == 'cat' +coll.add_relation("cat", "is-a", "animal") +coll.add_relation("dog", "is-a", "animal") +rel = coll.get_first_relation("dog") +assert rel.s == "dog" +rel = coll.get_first_relation(p="is-a") +assert rel.s == "cat" assert coll.nrelations == 2 -rel = coll.get_first_relation(s='no-such-subject') +rel = coll.get_first_relation(s="no-such-subject") assert rel is None # Create instances -url = f'json://{thisdir}/MyEntity.json?mode=r' +url = f"json://{thisdir}/MyEntity.json?mode=r" e = dlite.Instance.from_url(url) inst1 = dlite.Instance.from_metaid(e.uri, [3, 2]) -inst2 = dlite.Instance.from_metaid(e.uri, (3, 4), 'myinst') +inst2 = dlite.Instance.from_metaid(e.uri, (3, 4), "myinst") # Add instances -coll.add('inst1', inst1) -coll.add('inst2', inst2) +coll.add("inst1", inst1) +coll.add("inst2", inst2) assert len(coll) == 2 -assert coll.has('inst1') -assert not coll.has('inst3') +assert coll.has("inst1") +assert not coll.has("inst3") assert coll.has_id(inst2.uuid) assert coll.has_id(inst2.uri) -assert not coll.has_id('non-existing-id') +assert not coll.has_id("non-existing-id") # Save -with dlite.Storage('json', outdir / 'coll0.json', 'mode=w') as s: +with dlite.Storage("json", outdir / "coll0.json", "mode=w") as s: coll.save(s) -coll.save('json', outdir / 'coll1.json', 'mode=w') -coll.save(f'json://{outdir}/coll2.json?mode=w') -coll.save(f'json://{outdir}/coll3.json?mode=w', include_instances=False) +coll.save("json", outdir / "coll1.json", "mode=w") +coll.save(f"json://{outdir}/coll2.json?mode=w") +coll.save(f"json://{outdir}/coll3.json?mode=w", include_instances=False) data = [] for i in range(3): - with open(outdir / f'coll{i}.json') as f: + with open(outdir / f"coll{i}.json") as f: data.append(f.read()) assert data[1] == data[0] assert data[2] == data[0] # Load -with dlite.Storage('json', outdir / 'coll0.json', 'mode=r') as s: - coll0 = dlite.Collection.load(s, id='mycoll') -coll1 = dlite.Collection.load('json', outdir / 'coll1.json', 'mode=r', - id=coll.uuid) -coll2 = dlite.Collection.load(f'json://{outdir}/coll2.json?mode=r#mycoll') +with dlite.Storage("json", outdir / "coll0.json", "mode=r") as s: + coll0 = dlite.Collection.load(s, id="mycoll") +coll1 = dlite.Collection.load( + "json", outdir / "coll1.json", "mode=r", id=coll.uuid +) +coll2 = dlite.Collection.load(f"json://{outdir}/coll2.json?mode=r#mycoll") assert coll0 == coll assert coll1 == coll assert coll2 == coll @@ -67,75 +68,82 @@ # Remove relation assert coll.nrelations == 8 -coll.remove_relations('cat') +coll.remove_relations("cat") assert coll.nrelations == 7 -rel = coll.get_first_relation('dog') 
-assert rel.s == 'dog' +rel = coll.get_first_relation("dog") +assert rel.s == "dog" # Remove instance assert len(coll) == 2 -coll.remove('inst2') +coll.remove("inst2") assert len(coll) == 1 -inst1b = coll.get('inst1') +inst1b = coll.get("inst1") assert inst1b == inst1 assert inst1b != inst2 # Cannot add an instance with an existing label try: - coll.add('inst1', inst2) + coll.add("inst1", inst2) except dlite.DLiteError: pass else: - raise RuntimeError('should not be able to replace an existing instance') + raise RuntimeError("should not be able to replace an existing instance") -coll.add('inst1', inst2, force=True) # forced replacement -assert coll.get('inst1') == inst2 -coll.add('inst1', inst1, force=True) # revert -assert coll.get('inst1') == inst1 +coll.add("inst1", inst2, force=True) # forced replacement +assert coll.get("inst1") == inst2 +coll.add("inst1", inst1, force=True) # revert +assert coll.get("inst1") == inst1 # Test convinience functions i1 = coll.get_id(inst1.uuid) assert i1 == inst1 -assert coll.has('inst1') is True -assert coll.has('inst2') is False -assert coll.has('animal') is False +assert coll.has("inst1") is True +assert coll.has("inst2") is False +assert coll.has("animal") is False rel = coll.get_first_relation() -assert rel.s == 'dog' -assert rel.p == 'is-a' -assert rel.o == 'animal' -rel = coll.get_first_relation(p='_has-meta') -assert rel.s == 'inst1' -assert rel.p == '_has-meta' -assert rel.o == 'http://onto-ns.com/meta/0.1/MyEntity' - -i1, = coll.get_instances() +assert rel.s == "dog" +assert rel.p == "is-a" +assert rel.o == "animal" +rel = coll.get_first_relation(p="_has-meta") +assert rel.s == "inst1" +assert rel.p == "_has-meta" +assert rel.o == "http://onto-ns.com/meta/0.1/MyEntity" + +(i1,) = coll.get_instances() assert i1 == inst1 # We have no collections in the collection assert not list(coll.get_instances(metaid=dlite.COLLECTION_ENTITY)) -i1, = coll.get_instances(metaid='http://onto-ns.com/meta/0.1/MyEntity') +(i1,) = coll.get_instances(metaid="http://onto-ns.com/meta/0.1/MyEntity") assert i1 == inst1 -label1, = coll.get_labels() -assert label1 == 'inst1' +(label1,) = coll.get_labels() +assert label1 == "inst1" rels = list(coll.get_relations()) assert len(rels) == 4 -rels = list(coll.get_relations(p='_is-a')) +rels = list(coll.get_relations(p="_is-a")) assert len(rels) == 1 -rels = list(coll.get_relations(p='_xxx')) +rels = list(coll.get_relations(p="_xxx")) assert len(rels) == 0 -assert list(coll.get_subjects()) == ['dog', 'inst1', 'inst1', 'inst1'] +assert list(coll.get_subjects()) == ["dog", "inst1", "inst1", "inst1"] assert list(coll.get_predicates()) == [ - 'is-a', '_is-a', '_has-uuid', '_has-meta'] + "is-a", + "_is-a", + "_has-uuid", + "_has-meta", +] assert list(coll.get_objects()) == [ - 'animal', 'Instance', inst1.uuid, inst1.meta.uri] - + "animal", + "Instance", + inst1.uuid, + inst1.meta.uri, +] # String representation diff --git a/bindings/python/tests/test_datamodel.py b/bindings/python/tests/test_datamodel.py index 61f6c00de..ec33df6bd 100644 --- a/bindings/python/tests/test_datamodel.py +++ b/bindings/python/tests/test_datamodel.py @@ -5,19 +5,25 @@ from dlite.datamodel import DataModel -datamodel = DataModel('http://onto-ns/meta/0.1/Atoms') -datamodel.description = 'A test entity for atoms...' 
-datamodel.add_dimension('natoms', 'Number of atoms.') -datamodel.add_dimension('ncoords', 'Number of coordinates (always 3).') -datamodel.add_dimension('nvecs', 'Number of lattice vectors (always 3).') -datamodel.add_property('symbol', 'string', ['natoms'], - description='Chemical symbol of each atom.') -datamodel.add_property('positions', 'float', ['natoms', 'ncoords'], - description='Position of each atom.') -datamodel.add_property('unitcell', 'float', ['nvecs', 'ncoords'], - description='Unit cell.') +datamodel = DataModel("http://onto-ns/meta/0.1/Atoms") +datamodel.description = "A test entity for atoms..." +datamodel.add_dimension("natoms", "Number of atoms.") +datamodel.add_dimension("ncoords", "Number of coordinates (always 3).") +datamodel.add_dimension("nvecs", "Number of lattice vectors (always 3).") +datamodel.add_property( + "symbol", "string", ["natoms"], description="Chemical symbol of each atom." +) +datamodel.add_property( + "positions", + "float", + ["natoms", "ncoords"], + description="Position of each atom.", +) +datamodel.add_property( + "unitcell", "float", ["nvecs", "ncoords"], description="Unit cell." +) Atoms = datamodel.get() -#atoms = Atoms(dims=[2, 3, 3]) +# atoms = Atoms(dims=[2, 3, 3]) atoms = Atoms(dims=dict(nvecs=3, ncoords=3, natoms=2)) diff --git a/bindings/python/tests/test_entity.py b/bindings/python/tests/test_entity.py index 8c5cb1853..6eb85ef5f 100755 --- a/bindings/python/tests/test_entity.py +++ b/bindings/python/tests/test_entity.py @@ -10,6 +10,7 @@ try: import pytest + HAVE_PYTEST = True except ModuleNotFoundError: HAVE_PYTEST = False @@ -17,7 +18,7 @@ thisdir = os.path.abspath(os.path.dirname(__file__)) -url = 'json://' + thisdir + '/MyEntity.json' +url = "json://" + thisdir + "/MyEntity.json" # Load metadata (i.e. 
an instance of meta-metadata) from url
@@ -25,29 +26,29 @@
 print(myentity.uuid)

 # Check some properties of the entity
-assert myentity.uuid == 'a0e63529-3397-5c4f-a56c-14bf07ecc219'
-assert myentity.uri == 'http://onto-ns.com/meta/0.1/MyEntity'
-assert myentity.dimensions == {'ndimensions': 2, 'nproperties': 14}
+assert myentity.uuid == "a0e63529-3397-5c4f-a56c-14bf07ecc219"
+assert myentity.uri == "http://onto-ns.com/meta/0.1/MyEntity"
+assert myentity.dimensions == {"ndimensions": 2, "nproperties": 14}
 assert not myentity.is_data
 assert myentity.is_meta
 assert not myentity.is_metameta

 # Store the entity to a new file
-myentity.save('json://xxx.json?mode=w')
+myentity.save("json://xxx.json?mode=w")

 # Try to overwrite without mode - should fail because metadata is immutable
 try:
-    myentity.save('json://xxx.json')
+    myentity.save("json://xxx.json")
 except dlite.DLiteError:
     pass
 else:
-    assert False, 'overwriting single-entity formatted file'
+    assert False, "overwriting single-entity formatted file"

 # Create an instance of `myentity` with dimensions 2, 3
 # For convenience, we give it a unique label "myid" that can be used
 # interchangeably with its uuid
-inst = Instance.from_metaid(myentity.uri, [2, 3], 'myid')
-assert inst.dimensions == {'N': 2, 'M': 3}
+inst = Instance.from_metaid(myentity.uri, [2, 3], "myid")
+assert inst.dimensions == {"N": 2, "M": 3}
 assert inst.is_data
 assert not inst.is_meta
 assert not inst.is_metameta
@@ -56,84 +57,86 @@
 assert inst.uuid in dlite.istore_get_uuids()

 # Assign properties
-inst['a-blob'] = bytearray(b'0123456789abcdef')
-inst['a-blob'] = b'0123456789abcdef'
-inst['a-blob-array'] = [[b'abcd', '00112233'], [np.int32(42), b'xyz_']]
-inst['a-blob-array'] = [[b'0123', b'4567'], [b'89ab', b'cdef']]
-inst['a-bool'] = False
-inst['a-bool-array'] = True, False
-inst['an-int'] = 42
-inst['an-int-array'] = 1, 2, 3
-inst['a-float'] = 42.3
-inst['a-float64-array'] = 3.14, 5.0, 42.3
-inst['a-fixstring'] = 'something'
-inst['a-fixstring-array'] = [['Al', 'X'], ['Mg', 'Si']]
-inst['a-string'] = 'Hello!'
-inst['a-string-array'] = [['a', 'b', 'c'], ['dd', 'eee', 'ffff']]
-inst['a-relation'] = dlite.Relation('dog', 'is_a', 'mammal')
-inst['a-relation'] = ['dog', 'is_a', 'mammal']
-inst['a-relation'] = dict(s='dog', p='is_a', o='mammal')
-inst['a-relation-array'] = [
-    ('cheep', 'is_a', 'mammal'),
-    dlite.Relation('cat', 'is_a', 'mammal'),
-    ]
+inst["a-blob"] = bytearray(b"0123456789abcdef")
+inst["a-blob"] = b"0123456789abcdef"
+inst["a-blob-array"] = [[b"abcd", "00112233"], [np.int32(42), b"xyz_"]]
+inst["a-blob-array"] = [[b"0123", b"4567"], [b"89ab", b"cdef"]]
+inst["a-bool"] = False
+inst["a-bool-array"] = True, False
+inst["an-int"] = 42
+inst["an-int-array"] = 1, 2, 3
+inst["a-float"] = 42.3
+inst["a-float64-array"] = 3.14, 5.0, 42.3
+inst["a-fixstring"] = "something"
+inst["a-fixstring-array"] = [["Al", "X"], ["Mg", "Si"]]
+inst["a-string"] = "Hello!"
+inst["a-string-array"] = [["a", "b", "c"], ["dd", "eee", "ffff"]] +inst["a-relation"] = dlite.Relation("dog", "is_a", "mammal") +inst["a-relation"] = ["dog", "is_a", "mammal"] +inst["a-relation"] = dict(s="dog", p="is_a", o="mammal") +inst["a-relation-array"] = [ + ("cheep", "is_a", "mammal"), + dlite.Relation("cat", "is_a", "mammal"), +] # Print the value of all properties for i in range(len(inst)): - print('prop%d:' % i, inst[i]) + print("prop%d:" % i, inst[i]) # String representation (as json) -#print(inst) +# print(inst) # Check save and load -inst.save('json://inst.json?mode=w') -inst2 = Instance.from_url('json://inst.json') -blob = inst2['a-blob'] +inst.save("json://inst.json?mode=w") +inst2 = Instance.from_url("json://inst.json") +blob = inst2["a-blob"] del inst2 inst2 = Instance.from_url( - 'json://inst.json?mode=r#46a67765-3d8b-5764-9583-3aec59a17983') -assert inst2['a-blob'] == blob + "json://inst.json?mode=r#46a67765-3d8b-5764-9583-3aec59a17983" +) +assert inst2["a-blob"] == blob del inst2 -inst2 = Instance.from_location('json', 'inst.json') -assert inst2['a-blob'] == blob +inst2 = Instance.from_location("json", "inst.json") +assert inst2["a-blob"] == blob del inst2 -inst2 = Instance.from_location('json', 'inst.json', - id='46a67765-3d8b-5764-9583-3aec59a17983') -assert inst2['a-blob'] == blob +inst2 = Instance.from_location( + "json", "inst.json", id="46a67765-3d8b-5764-9583-3aec59a17983" +) +assert inst2["a-blob"] == blob del inst2 -with dlite.Storage('json', 'inst.json') as s: +with dlite.Storage("json", "inst.json") as s: inst2 = dlite.Instance.from_storage(s) -assert inst2['a-blob'] == blob +assert inst2["a-blob"] == blob del inst2 -with dlite.Storage('json', 'inst.json') as s: - inst2 = s.load(id='46a67765-3d8b-5764-9583-3aec59a17983') -assert inst2['a-blob'] == blob +with dlite.Storage("json", "inst.json") as s: + inst2 = s.load(id="46a67765-3d8b-5764-9583-3aec59a17983") +assert inst2["a-blob"] == blob del inst2 # Make sure we fail with an exception for pathetic cases try: - Instance.from_location('json', '/', 'mode=r') + Instance.from_location("json", "/", "mode=r") except dlite.DLiteError: print('*** catched error loading "/" in read mode') try: - Instance.from_location('json', '/', 'mode=w') + Instance.from_location("json", "/", "mode=w") except dlite.DLiteError: print('*** catched error loading "/" in write mode') try: - Instance.from_location('json', '') + Instance.from_location("json", "") except dlite.DLiteError: print('*** catched error loading ""') try: - Instance.from_location('json', 'non-existing-path...') + Instance.from_location("json", "non-existing-path...") except dlite.DLiteError: print('*** catched error loading "non-existing-path..."') @@ -165,57 +168,67 @@ s = pickle.dumps(inst) inst3 = pickle.loads(s) -dim = Dimension('N') +dim = Dimension("N") -prop = Property("a", type='float') +prop = Property("a", type="float") -prop2 = Property("b", type='string10', dims=['I', 'J', 'K'], - description='something enlightening...') +prop2 = Property( + "b", + type="string10", + dims=["I", "J", "K"], + description="something enlightening...", +) assert any(prop2.shape) -props = myentity['properties'] +props = myentity["properties"] props[0] assert inst.meta == myentity -e = dlite.get_instance('http://onto-ns.com/meta/0.1/MyEntity') +e = dlite.get_instance("http://onto-ns.com/meta/0.1/MyEntity") assert e == myentity assert e != inst e2 = Instance.create_metadata( - 'http://onto-ns.com/meta/0.1/NewEntity', - [Dimension('N', 'Number of something')], - 
[Property('name', type='string', description='Name of something.'), - Property('arr', type='int', dims=['N+2'], description='An array.'), - Property('v', type='double', unit='m/s', description='Velocity')], - 'Something new...') + "http://onto-ns.com/meta/0.1/NewEntity", + [Dimension("N", "Number of something")], + [ + Property("name", type="string", description="Name of something."), + Property("arr", type="int", dims=["N+2"], description="An array."), + Property("v", type="double", unit="m/s", description="Velocity"), + ], + "Something new...", +) e3 = Instance.create_metadata( - 'http://onto-ns.com/meta/0.1/NewEntity2', + "http://onto-ns.com/meta/0.1/NewEntity2", [], - [Property('name', type='string', description='Name of something.'), - Property('arr', type='int', description='An array.'), - Property('v', type='double', unit='m/s', description='Velocity')], - 'Something new...') + [ + Property("name", type="string", description="Name of something."), + Property("arr", type="int", description="An array."), + Property("v", type="double", unit="m/s", description="Velocity"), + ], + "Something new...", +) # Test get_property_as_string() / set_property_from_string() -assert inst.get_property_as_string('an-int-array') == '[1, 2, 3]' -inst.set_property_from_string('an-int-array', '[-1, 5, 6]') -assert inst.get_property_as_string('an-int-array') == '[-1, 5, 6]' +assert inst.get_property_as_string("an-int-array") == "[1, 2, 3]" +inst.set_property_from_string("an-int-array", "[-1, 5, 6]") +assert inst.get_property_as_string("an-int-array") == "[-1, 5, 6]" # Test for issue #502 newinst = e( - dimensions={'N': 2, 'M': 3}, + dimensions={"N": 2, "M": 3}, properties={ - 'a-float': 314, - 'a-string-array': [['a', 'b', 'c'], ['d', 'e', 'f']], + "a-float": 314, + "a-string-array": [["a", "b", "c"], ["d", "e", "f"]], }, - id='newinst', + id="newinst", ) -assert newinst['a-float'] == 314 -assert newinst['a-string-array'].tolist() == [['a', 'b', 'c'], ['d', 'e', 'f']] -assert newinst['an-int'] == 0 +assert newinst["a-float"] == 314 +assert newinst["a-string-array"].tolist() == [["a", "b", "c"], ["d", "e", "f"]] +assert newinst["an-int"] == 0 # Create a new reference assert newinst._refcount == 1 @@ -225,46 +238,46 @@ # Test save -inst.save('json://yyy.json?mode=w') +inst.save("json://yyy.json?mode=w") try: import yaml except ImportError: pass else: - inst.save('yaml://yyy.yaml?mode=w') + inst.save("yaml://yyy.yaml?mode=w") # Test metadata -assert inst.meta.dimnames() == ['N', 'M'] +assert inst.meta.dimnames() == ["N", "M"] assert inst.meta.propnames() == [ - 'a-blob', - 'a-blob-array', - 'a-bool', - 'a-bool-array', - 'an-int', - 'an-int-array', - 'a-float', - 'a-float64-array', - 'a-fixstring', - 'a-fixstring-array', - 'a-string', - 'a-string-array', - 'a-relation', - 'a-relation-array', + "a-blob", + "a-blob-array", + "a-bool", + "a-bool-array", + "an-int", + "an-int-array", + "a-float", + "a-float64-array", + "a-fixstring", + "a-fixstring-array", + "a-string", + "a-string-array", + "a-relation", + "a-relation-array", ] # Test property -prop = inst.meta.getprop('a-blob-array') -assert prop.name == 'a-blob-array' -assert prop.type == 'blob4' -assert prop.shape.tolist() == ['N', 'N'] +prop = inst.meta.getprop("a-blob-array") +assert prop.name == "a-blob-array" +assert prop.type == "blob4" +assert prop.shape.tolist() == ["N", "N"] assert prop.unit == None -assert prop.description == 'A blob array.' +assert prop.description == "A blob array." 
-prop = dlite.Property('newprop', 'int') -prop.shape = ('a', 'b', 'c') +prop = dlite.Property("newprop", "int") +prop.shape = ("a", "b", "c") assert prop.ndims == 3 if HAVE_PYTEST: with pytest.raises(AttributeError): @@ -277,8 +290,8 @@ # Metadata schema schema = dlite.get_instance(dlite.ENTITY_SCHEMA) -schema.save('entity_schema.json?mode=w;arrays=false') -schema.meta.save('basic_metadata_schema.json?mode=w;arrays=false') +schema.save("entity_schema.json?mode=w;arrays=false") +schema.meta.save("basic_metadata_schema.json?mode=w;arrays=false") -mm = dlite.Instance.from_url('json://entity_schema.json') +mm = dlite.Instance.from_url("json://entity_schema.json") assert mm.uri == dlite.ENTITY_SCHEMA diff --git a/bindings/python/tests/test_factory.py b/bindings/python/tests/test_factory.py index 9c48f3e7c..67636849f 100755 --- a/bindings/python/tests/test_factory.py +++ b/bindings/python/tests/test_factory.py @@ -16,25 +16,25 @@ def __init__(self, name, age, skills): self.skills = skills def __repr__(self): - return 'Person(%r, %r, %r)' % (self.name, self.age, list(self.skills)) + return "Person(%r, %r, %r)" % (self.name, self.age, list(self.skills)) -url = f'json://{thisdir}/Person.json' +url = f"json://{thisdir}/Person.json" -print('-- create: ExPerson') +print("-- create: ExPerson") ExPerson = dlite.classfactory(Person, url=url) -print('-- create: person1') -person1 = Person('Jack Daniel', 42, ['distilling', 'tasting']) +print("-- create: person1") +person1 = Person("Jack Daniel", 42, ["distilling", "tasting"]) -print('-- create: person2') -person2 = ExPerson('Jack Daniel', 42, ['distilling', 'tasting']) -person2.dlite_inst.save('json', 'persons.json', 'mode=w') +print("-- create: person2") +person2 = ExPerson("Jack Daniel", 42, ["distilling", "tasting"]) +person2.dlite_inst.save("json", "persons.json", "mode=w") # Print json-representation of person2 using dlite print(person2.dlite_inst.asjson(indent=2)) -inst = dlite.Instance.from_url('json://persons.json') +inst = dlite.Instance.from_url("json://persons.json") person3 = dlite.instancefactory(Person, inst) person4 = dlite.objectfactory(person1, meta=person2.dlite_meta) @@ -43,6 +43,7 @@ def __repr__(self): # Test for issue #523 import numpy as np + class Atoms: def __init__(self, symbols, positions, masses, energy): self.name = "Atoms" @@ -51,15 +52,16 @@ def __init__(self, symbols, positions, masses, energy): self.masses = masses self.groundstate_energy = energy + atoms = Atoms( - symbols=["Al"]*4, + symbols=["Al"] * 4, positions=[ [0.0, 0.0, 0.0], [0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5], ], - masses=[26.98]*4, + masses=[26.98] * 4, energy=2.54, ) diff --git a/bindings/python/tests/test_global_dlite_state.py b/bindings/python/tests/test_global_dlite_state.py index 0cfa25c70..849fcc351 100644 --- a/bindings/python/tests/test_global_dlite_state.py +++ b/bindings/python/tests/test_global_dlite_state.py @@ -7,9 +7,9 @@ thisdir = os.path.abspath(os.path.dirname(__file__)) -assert len(dlite.istore_get_uuids()) == 3 # 3 Hardcoded dlite instances +assert len(dlite.istore_get_uuids()) == 3 # 3 Hardcoded dlite instances -coll = dlite.Collection() # (1) +coll = dlite.Collection() # (1) assert dlite.has_instance(coll.uuid) assert coll.uuid in dlite.istore_get_uuids() assert len(dlite.istore_get_uuids()) == 3 + 1 @@ -17,14 +17,14 @@ # Must exist in imported dlite in different module (mod1) assert_exists_in_module(coll.uuid) -url = 'json://' + thisdir + '/MyEntity.json' + "?mode=r" -e = Instance.from_url(url) # (2) +url = "json://" + thisdir 
+ "/MyEntity.json" + "?mode=r" +e = Instance.from_url(url) # (2) assert len(dlite.istore_get_uuids()) == 3 + 2 inst1 = Instance.from_metaid(e.uri, [3, 2]) # (3) assert len(dlite.istore_get_uuids()) == 3 + 3 -inst2 = Instance.from_metaid(e.uri, (3, 4), 'myinst') # (4) +inst2 = Instance.from_metaid(e.uri, (3, 4), "myinst") # (4) assert len(dlite.istore_get_uuids()) == 3 + 4 del inst1 @@ -32,20 +32,22 @@ # Use compile and exec with dlite defined in globals env = globals().copy() -filename=os.path.join(thisdir, 'global_dlite_state_mod2.py') +filename = os.path.join(thisdir, "global_dlite_state_mod2.py") with open(filename) as fd: - exec(compile(fd.read(), filename, 'exec'), env) + exec(compile(fd.read(), filename, "exec"), env) # mod2 has added one instance assert len(dlite.istore_get_uuids()) == 3 + 4 # Use importlib with mod3 -importlib.import_module('global_dlite_state_mod3') +importlib.import_module("global_dlite_state_mod3") # mod3 has added one instance assert len(dlite.istore_get_uuids()) == 3 + 5 -importlib.__import__('global_dlite_state_mod4', globals=env, locals=None, fromlist=(), level=0) +importlib.__import__( + "global_dlite_state_mod4", globals=env, locals=None, fromlist=(), level=0 +) # mod4 has added one instance assert len(dlite.istore_get_uuids()) == 3 + 6 diff --git a/bindings/python/tests/test_iri.py b/bindings/python/tests/test_iri.py index 4896341f3..0ce438a09 100644 --- a/bindings/python/tests/test_iri.py +++ b/bindings/python/tests/test_iri.py @@ -6,15 +6,15 @@ thisdir = os.path.abspath(os.path.dirname(__file__)) -url = 'json://' + thisdir + '/MyEntity.json' + "?mode=r" +url = "json://" + thisdir + "/MyEntity.json" + "?mode=r" E = dlite.Instance.from_url(url) -E.iri = 'http://emmo.info/emmo/EMMO_Physical' +E.iri = "http://emmo.info/emmo/EMMO_Physical" E.iri = None -E.iri = 'http://emmo.info/emmo/EMMO_Physical' +E.iri = "http://emmo.info/emmo/EMMO_Physical" e = E([3, 4]) -e.iri = 'abc' +e.iri = "abc" -p = E['properties'][3] -p.iri = 'http://emmo.info/emmo/EMMO_Length' +p = E["properties"][3] +p.iri = "http://emmo.info/emmo/EMMO_Length" diff --git a/bindings/python/tests/test_mapping.py b/bindings/python/tests/test_mapping.py index 726d4dfaa..ebfe7890b 100755 --- a/bindings/python/tests/test_mapping.py +++ b/bindings/python/tests/test_mapping.py @@ -5,38 +5,38 @@ # Configure search paths thisdir = Path(__file__).parent.absolute() -dlite.storage_path.append(f'{thisdir}/*.json') -dlite.python_mapping_plugin_path.append(f'{thisdir}/python-mapping-plugins') +dlite.storage_path.append(f"{thisdir}/*.json") +dlite.python_mapping_plugin_path.append(f"{thisdir}/python-mapping-plugins") # Create an instance of Person -Person = dlite.Instance.from_url(f'json:{thisdir}/Person.json?mode=r') +Person = dlite.Instance.from_url(f"json:{thisdir}/Person.json?mode=r") person = Person(dimensions=[2]) -person.name = 'Neil Armstrong' +person.name = "Neil Armstrong" person.age = 39 -person.skills = ['keping the head cold', 'famous quotes'] -#person.incref() +person.skills = ["keping the head cold", "famous quotes"] +# person.incref() # Map person to an instance of SimplePerson -simple = dlite.mapping('http://onto-ns.com/meta/0.1/SimplePerson', [person]) +simple = dlite.mapping("http://onto-ns.com/meta/0.1/SimplePerson", [person]) assert simple != person assert simple.name == person.name assert simple.age == person.age # Add the instance of SimplePerson to a collection coll = dlite.Collection() -coll.add('simple', simple) +coll.add("simple", simple) # Get the added person instance from the 
collection mapped to a new # instance of SimplePerson (the second argument can be omitted...) -s = coll.get('simple', 'http://onto-ns.com/meta/0.1/SimplePerson') +s = coll.get("simple", "http://onto-ns.com/meta/0.1/SimplePerson") assert s == simple -s2 = coll.get('simple') +s2 = coll.get("simple") assert s2 == s # Get the added person instance from the collection mapped to a new # instance of Person (with no skills) -p = coll.get('simple', 'http://onto-ns.com/meta/0.1/Person') +p = coll.get("simple", "http://onto-ns.com/meta/0.1/Person") assert p != person assert p.meta == person.meta assert p.name == person.name diff --git a/bindings/python/tests/test_misc.py b/bindings/python/tests/test_misc.py index 67b8a8982..f5153d6af 100755 --- a/bindings/python/tests/test_misc.py +++ b/bindings/python/tests/test_misc.py @@ -4,26 +4,39 @@ assert dlite.get_uuid_version() == 4 -assert dlite.get_uuid_version('abc') == 5 -assert dlite.get_uuid_version('6cb8e707-0fc5-5f55-88d4-d4fed43e64a8') == 0 -assert dlite.get_uuid('abc') == '6cb8e707-0fc5-5f55-88d4-d4fed43e64a8' -assert dlite.get_uuid('6cb8e707-0fc5-5f55-88d4-d4fed43e64a8') == ( - '6cb8e707-0fc5-5f55-88d4-d4fed43e64a8') +assert dlite.get_uuid_version("abc") == 5 +assert dlite.get_uuid_version("6cb8e707-0fc5-5f55-88d4-d4fed43e64a8") == 0 +assert dlite.get_uuid("abc") == "6cb8e707-0fc5-5f55-88d4-d4fed43e64a8" +assert dlite.get_uuid("6cb8e707-0fc5-5f55-88d4-d4fed43e64a8") == ( + "6cb8e707-0fc5-5f55-88d4-d4fed43e64a8" +) -assert dlite.join_meta_uri('name', 'version', 'ns') == 'ns/version/name' -assert dlite.split_meta_uri('ns/version/name') == ['name', 'version', 'ns'] +assert dlite.join_meta_uri("name", "version", "ns") == "ns/version/name" +assert dlite.split_meta_uri("ns/version/name") == ["name", "version", "ns"] -assert dlite.join_url('driver', 'loc', 'mode=r', 'fragment') == ( - 'driver://loc?mode=r#fragment') -assert dlite.join_url('driver', 'loc', 'mode=r') == 'driver://loc?mode=r' -assert dlite.join_url('driver', 'loc') == 'driver://loc' -assert dlite.join_url('driver', 'loc', fragment='frag') == 'driver://loc#frag' +assert dlite.join_url("driver", "loc", "mode=r", "fragment") == ( + "driver://loc?mode=r#fragment" +) +assert dlite.join_url("driver", "loc", "mode=r") == "driver://loc?mode=r" +assert dlite.join_url("driver", "loc") == "driver://loc" +assert dlite.join_url("driver", "loc", fragment="frag") == "driver://loc#frag" -assert dlite.split_url('driver://loc?mode=r#fragment') == [ - 'driver', 'loc', 'mode=r', 'fragment'] -assert dlite.split_url('driver://loc?mode=r&verbose=1') == [ - 'driver', 'loc', 'mode=r&verbose=1', ''] -assert dlite.split_url('driver://loc#fragment') == [ - 'driver', 'loc', '', 'fragment'] -assert dlite.split_url('loc#fragment') == [ - '', 'loc', '', 'fragment'] +assert dlite.split_url("driver://loc?mode=r#fragment") == [ + "driver", + "loc", + "mode=r", + "fragment", +] +assert dlite.split_url("driver://loc?mode=r&verbose=1") == [ + "driver", + "loc", + "mode=r&verbose=1", + "", +] +assert dlite.split_url("driver://loc#fragment") == [ + "driver", + "loc", + "", + "fragment", +] +assert dlite.split_url("loc#fragment") == ["", "loc", "", "fragment"] diff --git a/bindings/python/tests/test_paths.py b/bindings/python/tests/test_paths.py index b8716b2c1..cd3a879ea 100644 --- a/bindings/python/tests/test_paths.py +++ b/bindings/python/tests/test_paths.py @@ -4,26 +4,26 @@ import dlite -print('dlite storage paths:') +print("dlite storage paths:") for path in dlite.storage_path: - print('- ' + path) + print("- " + path) 
print() -print('append path with glob pattern:') +print("append path with glob pattern:") thisdir = Path(__file__).parent.absolute() -dlite.storage_path.append(f'{thisdir}/*.json') +dlite.storage_path.append(f"{thisdir}/*.json") for path in dlite.storage_path: - print('- ' + path) + print("- " + path) print() -print('delete second last path:') +print("delete second last path:") del dlite.storage_path[-2] for path in dlite.storage_path: - print('- ' + path) + print("- " + path) print() -print('Predefined paths:') -for (k,v) in dlite.__dict__.items(): - if k.endswith('path'): +print("Predefined paths:") +for k, v in dlite.__dict__.items(): + if k.endswith("path"): print(f"dlite.{k}='{v}'") diff --git a/bindings/python/tests/test_postgresql1_write.py b/bindings/python/tests/test_postgresql1_write.py index 6b4171bb2..33aecc914 100644 --- a/bindings/python/tests/test_postgresql1_write.py +++ b/bindings/python/tests/test_postgresql1_write.py @@ -19,20 +19,20 @@ def parse_pgconf(): Exit with code 44 (skip test) if the pgconf.h file does not exists. """ rootdir = thisdir.parent.parent.parent - pgconf = rootdir / 'storages/python/tests-c/pgconf.h' + pgconf = rootdir / "storages/python/tests-c/pgconf.h" if not pgconf.exists(): print(f"No configuration file: {pgconf}") print("For more info, see storages/python/README.md") sys.exit(44) regex = re.compile(r'^#define +(\w+) +"(\w+)"') - with open(pgconf, 'rt') as f: + with open(pgconf, "rt") as f: d = {} for line in f: matchobj = regex.match(line) if matchobj: keyword, value = matchobj.groups() d[keyword.lower()] = value - keywords = 'host,user,database,password'.split(',') + keywords = "host,user,database,password".split(",") return tuple(d.get(key) for key in keywords) @@ -54,17 +54,18 @@ def ping_server(server="localhost", port=5432, timeout=3): ping_server() # Add metadata to search path -dlite.storage_path.append(f'{thisdir}/Person.json') +dlite.storage_path.append(f"{thisdir}/Person.json") # Load dataset host, user, database, password = parse_pgconf() inst = dlite.Instance.from_location( - 'json', f'{thisdir}/persons.json', - id='51c0d700-9ab0-43ea-9183-6ea22012ebee', + "json", + f"{thisdir}/persons.json", + id="51c0d700-9ab0-43ea-9183-6ea22012ebee", ) # Save to postgresql DB inst.save( - f'postgresql://{host}?user={user};database={database};password={password}' + f"postgresql://{host}?user={user};database={database};password={password}" ) diff --git a/bindings/python/tests/test_postgresql2_read.py b/bindings/python/tests/test_postgresql2_read.py index 958f69b9d..f81e2850c 100644 --- a/bindings/python/tests/test_postgresql2_read.py +++ b/bindings/python/tests/test_postgresql2_read.py @@ -18,22 +18,22 @@ ping_server() # Add metadata to search path -dlite.storage_path.append(f'{thisdir}/Person.json') +dlite.storage_path.append(f"{thisdir}/Person.json") # Read from postgresql DB host, user, database, password = parse_pgconf() inst = dlite.Instance.from_location( - driver='postgresql', + driver="postgresql", location=host, - options=f'user={user};database={database};password={password}', - id='51c0d700-9ab0-43ea-9183-6ea22012ebee', + options=f"user={user};database={database};password={password}", + id="51c0d700-9ab0-43ea-9183-6ea22012ebee", ) print(inst) -assert inst.uuid == '51c0d700-9ab0-43ea-9183-6ea22012ebee' -assert inst.meta.uri == 'http://onto-ns.com/meta/0.1/Person' -assert inst.dimensions == {'N': 2} -assert inst.name == 'Jack Daniel' +assert inst.uuid == "51c0d700-9ab0-43ea-9183-6ea22012ebee" +assert inst.meta.uri == 
"http://onto-ns.com/meta/0.1/Person" +assert inst.dimensions == {"N": 2} +assert inst.name == "Jack Daniel" assert inst.age == 42.0 -assert inst.skills.tolist() == ['distilling', 'tasting'] +assert inst.skills.tolist() == ["distilling", "tasting"] diff --git a/bindings/python/tests/test_property_mappings.py b/bindings/python/tests/test_property_mappings.py index 8c6b8b1d2..ffffdad4d 100755 --- a/bindings/python/tests/test_property_mappings.py +++ b/bindings/python/tests/test_property_mappings.py @@ -10,6 +10,7 @@ import pint except ImportError as exc: import sys + print(f"Skipped: {exc}") sys.exit(44) # exit code marking the test to be skipped @@ -19,47 +20,47 @@ # Configure paths thisdir = Path(__file__).parent.absolute() -#exdir = thisdir / '../../../examples/dehydrogenation' +# exdir = thisdir / '../../../examples/dehydrogenation' # ## Import module with instances from dehydrogenation example -#module_name = 'molecular_energies' -#file_path = f'{exdir}/1-simple-workflow/molecular_energies.py' +# module_name = 'molecular_energies' +# file_path = f'{exdir}/1-simple-workflow/molecular_energies.py' # -#spec = importlib.util.spec_from_file_location(module_name, file_path) -#module = importlib.util.module_from_spec(spec) -#sys.modules[module_name] = module -#spec.loader.exec_module(module) +# spec = importlib.util.spec_from_file_location(module_name, file_path) +# module = importlib.util.module_from_spec(spec) +# sys.modules[module_name] = module +# spec.loader.exec_module(module) # -#CH4 = module.coll['CH4'] -#Molecule = CH4.meta +# CH4 = module.coll['CH4'] +# Molecule = CH4.meta # # # ## Load entities and instantiate a molecule -#dlite.storage_path.append(f'{exdir}/entities/*.json') -#Molecule = dlite.get_instance('http://onto-ns.com/meta/0.1/Molecule') -#Substance = dlite.get_instance('http://onto-ns.com/meta/0.1/Substance') +# dlite.storage_path.append(f'{exdir}/entities/*.json') +# Molecule = dlite.get_instance('http://onto-ns.com/meta/0.1/Molecule') +# Substance = dlite.get_instance('http://onto-ns.com/meta/0.1/Substance') # -#inst = Molecule(dims={'natoms': 3, 'ncoords': 3}) -#inst.name = '' +# inst = Molecule(dims={'natoms': 3, 'ncoords': 3}) +# inst.name = '' # # ## Create triplestore using the rdflib backend -#ts = Triplestore('rdflib') +# ts = Triplestore('rdflib') # ## Define some prefixed namespaces -#CHEM = ts.bind('chem', 'http://onto-ns.com/onto/chemistry#') +# CHEM = ts.bind('chem', 'http://onto-ns.com/onto/chemistry#') # ## Add mappings -#ts.add_mapsTo(CHEM.Identifier, Molecule, 'name') -#ts.add_mapsTo(CHEM.GroundStateEnergy, Molecule, 'groundstate_energy') -#ts.add_mapsTo(CHEM.Identifier, Substance, 'id') -#ts.add_mapsTo(CHEM.GroundStateEnergy, Substance, 'molecule_energy') +# ts.add_mapsTo(CHEM.Identifier, Molecule, 'name') +# ts.add_mapsTo(CHEM.GroundStateEnergy, Molecule, 'groundstate_energy') +# ts.add_mapsTo(CHEM.Identifier, Substance, 'id') +# ts.add_mapsTo(CHEM.GroundStateEnergy, Substance, 'molecule_energy') # # # # -#mappings = [ +# mappings = [ # ('http://onto-ns.com/meta/0.1/Molecule#name', ':mapsTo', # 'chem:Identifier'), # ('http://onto-ns.com/meta/0.1/Molecule#groundstate_energy', ':mapsTo', @@ -68,78 +69,77 @@ # 'chem:Identifier'), # ('http://onto-ns.com/meta/0.1/Substance#molecule_energy', ':mapsTo', # 'chem:GroundStateEnergy'), -#] +# ] # # -#match = dm.match_factory(mappings) -#match_first = dm.match_factory(mappings, match_first=True) - +# match = dm.match_factory(mappings) +# match_first = dm.match_factory(mappings, match_first=True) # Check 
unitconvert_pint -assert dm.unitconvert('km', 34, 'm') == 0.034 -assert dm.unitconvert('s', 1, 'hour') == 3600 +assert dm.unitconvert("km", 34, "m") == 0.034 +assert dm.unitconvert("s", 1, "hour") == 3600 # The Windows test has problems understanding the UFT-8 encoding "Å" below. # Skip it on Windows for now... if sys.platform != "win32": - assert dm.unitconvert("Å", 34, 'um') == 34e4 + assert dm.unitconvert("Å", 34, "um") == 34e4 # Test to manually set up mapping steps -v = dm.Value(3.0, 'm/s', 'emmo:Velocity', cost=1) -t = dm.Value(1.1, 's', 'emmo:Time', cost=2) -t2 = dm.Value(2.2, 's', 'emmo:Time', cost=4) -l = dm.Value(4.0, 'm', 'emmo:Length', cost=8) +v = dm.Value(3.0, "m/s", "emmo:Velocity", cost=1) +t = dm.Value(1.1, "s", "emmo:Time", cost=2) +t2 = dm.Value(2.2, "s", "emmo:Time", cost=4) +l = dm.Value(4.0, "m", "emmo:Length", cost=8) step1 = dm.MappingStep( - output_iri='emmo:Length', + output_iri="emmo:Length", steptype=dm.StepType.FUNCTION, - function=lambda v, t: v*t, - cost=lambda v, t: 2*v*t, - output_unit='m', + function=lambda v, t: v * t, + cost=lambda v, t: 2 * v * t, + output_unit="m", ) -step1.add_inputs({'v': v, 't': t}) -step1.add_inputs({'v': v, 't': t2}) +step1.add_inputs({"v": v, "t": t}) +step1.add_inputs({"v": v, "t": t2}) step2 = dm.MappingStep( - output_iri=':Length', + output_iri=":Length", steptype=dm.StepType.MAPSTO, cost=2, - output_unit='m', + output_unit="m", ) -step2.add_inputs({'l': step1}) +step2.add_inputs({"l": step1}) step3 = dm.MappingStep( - output_iri=':ReducedLength', + output_iri=":ReducedLength", steptype=dm.StepType.FUNCTION, - function=lambda l: 0.7*l, + function=lambda l: 0.7 * l, cost=10, - output_unit='m', + output_unit="m", ) -step3.add_inputs({'l': step1}) -step3.add_inputs({'l': step2}) -step3.add_inputs({'l': l}) +step3.add_inputs({"l": step1}) +step3.add_inputs({"l": step2}) +step3.add_inputs({"l": l}) def isclose(a, b, rtol=1e-3): """Returns true if the relative difference between `a` and `b` is less than `rtol`.""" - return True if abs((b - a)/b) <= rtol else False + return True if abs((b - a) / b) <= rtol else False assert step1.number_of_routes() == 2 assert step2.number_of_routes() == 2 assert step3.number_of_routes() == 5 -assert isclose(dm.Quantity(3*1.1, 'm'), step1.eval(0)) -assert isclose(dm.Quantity(3*2.2, 'm'), step1.eval(1)) -assert isclose(dm.Quantity(0.7*3*1.1, 'm'), step3.eval(0)) -assert isclose(dm.Quantity(0.7*3*2.2, 'm'), step3.eval(1)) -assert isclose(dm.Quantity(0.7*3*1.1, 'm'), step3.eval(2)) -assert isclose(dm.Quantity(0.7*3*2.2, 'm'), step3.eval(3)) -assert isclose(dm.Quantity(0.7*4.0, 'm'), step3.eval(4)) -assert isclose(dm.Quantity(0.7*4.0, 'm'), step3.eval()) +assert isclose(dm.Quantity(3 * 1.1, "m"), step1.eval(0)) +assert isclose(dm.Quantity(3 * 2.2, "m"), step1.eval(1)) +assert isclose(dm.Quantity(0.7 * 3 * 1.1, "m"), step3.eval(0)) +assert isclose(dm.Quantity(0.7 * 3 * 2.2, "m"), step3.eval(1)) +assert isclose(dm.Quantity(0.7 * 3 * 1.1, "m"), step3.eval(2)) +assert isclose(dm.Quantity(0.7 * 3 * 2.2, "m"), step3.eval(3)) +assert isclose(dm.Quantity(0.7 * 4.0, "m"), step3.eval(4)) +assert isclose(dm.Quantity(0.7 * 4.0, "m"), step3.eval()) costs = step3.lowest_costs(10) assert len(costs) == 5 @@ -151,8 +151,7 @@ def isclose(a, b, rtol=1e-3): assert isclose(30.2, costs[4][0]) - -#routes = dm.mapping_route( +# routes = dm.mapping_route( # target='http://onto-ns.com/meta/0.1/Substance#molecule_energy', # sources=['http://onto-ns.com/meta/0.1/Molecule#groundstate_energy'], # triples=mappings) @@ -160,90 +159,87 
@@ def isclose(a, b, rtol=1e-3): # --------------------------------------- r = np.array([10, 20, 30, 40, 50, 60]) # particle radius [nm] -n = np.array([1, 3, 7, 6, 2, 1]) # particle number density [1e21 #/m^3] +n = np.array([1, 3, 7, 6, 2, 1]) # particle number density [1e21 #/m^3] -rv = dm.Value(r, 'nm', 'inst1') -nv = dm.Value(n, '1/m^3', 'inst2') +rv = dm.Value(r, "nm", "inst1") +nv = dm.Value(n, "1/m^3", "inst2") def average_radius(r, n): return np.sum(r * n) / np.sum(n) -mapsTo = 'http://emmo.info/domain-mappings#mapsTo' -instanceOf = 'http://emmo.info/datamodel#instanceOf' -subClassOf = 'http://www.w3.org/2000/01/rdf-schema#subClassOf' -#description = 'http://purl.org/dc/terms/description' -label = 'http://www.w3.org/2000/01/rdf-schema#label' -hasUnit = 'http://emmo.info/datamodel#hasUnit' -hasCost = ':hasCost' -RDF = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#' -type = RDF + 'type' -next = RDF + 'next' -first = RDF + 'first' -rest = RDF + 'rest' -nil = RDF + 'nil' -expects = 'https://w3id.org/function/ontology#expects' -returns = 'https://w3id.org/function/ontology#returns' +mapsTo = "http://emmo.info/domain-mappings#mapsTo" +instanceOf = "http://emmo.info/datamodel#instanceOf" +subClassOf = "http://www.w3.org/2000/01/rdf-schema#subClassOf" +# description = 'http://purl.org/dc/terms/description' +label = "http://www.w3.org/2000/01/rdf-schema#label" +hasUnit = "http://emmo.info/datamodel#hasUnit" +hasCost = ":hasCost" +RDF = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" +type = RDF + "type" +next = RDF + "next" +first = RDF + "first" +rest = RDF + "rest" +nil = RDF + "nil" +expects = "https://w3id.org/function/ontology#expects" +returns = "https://w3id.org/function/ontology#returns" triples = [ # Mappings for data models - ('inst1', mapsTo, 'mo:ParticleRadius'), - ('inst2', mapsTo, 'mo:NumberDensity'), - ('inst3', mapsTo, 'mo:AverageParticleRadius'), - - ('inst1', hasUnit, 'um'), - ('inst2', hasUnit, '1/m**3'), - ('inst3', hasUnit, 'um'), - + ("inst1", mapsTo, "mo:ParticleRadius"), + ("inst2", mapsTo, "mo:NumberDensity"), + ("inst3", mapsTo, "mo:AverageParticleRadius"), + ("inst1", hasUnit, "um"), + ("inst2", hasUnit, "1/m**3"), + ("inst3", hasUnit, "um"), # Mappings for the function - (':r', mapsTo, 'mo:ParticleRadius'), - (':n', mapsTo, 'mo:NumberDensity'), - (':ravg', mapsTo, 'mo:AverageParticleRadius'), - - ('average_radius_function', type, 'fno:Function'), - ('average_radius_function', expects, 'parameter_list'), - ('average_radius_function', returns, 'output_list'), - ('parameter_list', type, 'rdf:List'), - ('parameter_list', first, ':r'), - ('parameter_list', rest, 'lst2'), - ('lst2', type, 'rdf:List'), - ('lst2', first, ':n'), - ('lst2', rest, nil), - (':r', type, 'fno:Parameter'), - (':r', label, 'r'), - #(':r', hasUnit, 'um'), - (':n', type, 'fno:Parameter'), - (':n', label, 'n'), - #(':n', hasUnit, '1/m**3'), - ('output_list', type, 'rdf:List'), - ('output_list', first, ':ravg'), - ('output_list', rest, nil), - (':ravg', type, 'fno:Output'), - #(':ravg', hasUnit, 'm'), + (":r", mapsTo, "mo:ParticleRadius"), + (":n", mapsTo, "mo:NumberDensity"), + (":ravg", mapsTo, "mo:AverageParticleRadius"), + ("average_radius_function", type, "fno:Function"), + ("average_radius_function", expects, "parameter_list"), + ("average_radius_function", returns, "output_list"), + ("parameter_list", type, "rdf:List"), + ("parameter_list", first, ":r"), + ("parameter_list", rest, "lst2"), + ("lst2", type, "rdf:List"), + ("lst2", first, ":n"), + ("lst2", rest, nil), + (":r", type, 
"fno:Parameter"), + (":r", label, "r"), + # (':r', hasUnit, 'um'), + (":n", type, "fno:Parameter"), + (":n", label, "n"), + # (':n', hasUnit, '1/m**3'), + ("output_list", type, "rdf:List"), + ("output_list", first, ":ravg"), + ("output_list", rest, nil), + (":ravg", type, "fno:Output"), + # (':ravg', hasUnit, 'm'), ] -ts2 = Triplestore('rdflib') +ts2 = Triplestore("rdflib") ts2.add_triples(triples) # Check fno_mapper d = dm.fno_mapper(ts2) -assert d[':ravg'] == [('average_radius_function', [':r', ':n'])] +assert d[":ravg"] == [("average_radius_function", [":r", ":n"])] step = dm.mapping_route( - target='inst3', - sources={'inst1': r, 'inst2': n}, + target="inst3", + sources={"inst1": r, "inst2": n}, triplestore=ts2, - function_repo={'average_radius_function': average_radius}, + function_repo={"average_radius_function": average_radius}, ) assert step.number_of_routes() == 1 -assert step.lowest_costs() == [(22., 0)] -assert step.eval(unit='m') == 34e-6 +assert step.lowest_costs() == [(22.0, 0)] +assert step.eval(unit="m") == 34e-6 print(step.show()) -print('*** eval:', step.eval()) +print("*** eval:", step.eval()) diff --git a/bindings/python/tests/test_pydantic.py b/bindings/python/tests/test_pydantic.py index 9bc277e42..4a6b59e9d 100644 --- a/bindings/python/tests/test_pydantic.py +++ b/bindings/python/tests/test_pydantic.py @@ -9,18 +9,22 @@ from pydantic import BaseModel, Field except ImportError: import sys + sys.exit(44) class TransformationStatus(BaseModel): """Return from transformation status.""" - id: str = Field(..., description="ID for the given transformation process.") + id: str = Field( + ..., description="ID for the given transformation process." + ) status: Optional[str] = Field( None, description="Status for the transformation process." ) messages: Optional[List[str]] = Field( - None, description="Messages related to the transformation process.", + None, + description="Messages related to the transformation process.", ) created: Optional[float] = Field( None, @@ -28,14 +32,17 @@ class TransformationStatus(BaseModel): "Given as POSIX time stamp.", ) startTime: Optional[int] = Field( - None, description="Time when the transformation process started. " + None, + description="Time when the transformation process started. " "Given as POSIX time stamp.", ) finishTime: Optional[datetime] = Field( - None, description="Time when the tranformation process finished. " + None, + description="Time when the tranformation process finished. 
" "Given as POSIX time stamp.", ) + now = datetime.now().timestamp() t = TransformationStatus( @@ -56,24 +63,25 @@ class TransformationStatus(BaseModel): assert inst.finishTime == str(dt) -#============================================================== +# ============================================================== # Test nested pydantic model -#============================================================== +# ============================================================== class Foo(BaseModel): count: int size: Optional[float] = -1 class Bar(BaseModel): - apple: str = 'x' - banana: str = 'y' + apple: str = "x" + banana: str = "y" class Spam(BaseModel): foo: Foo bars: List[Bar] -m = Spam(foo={'count': 4}, bars=[{'apple': 'x1'}, {'apple': 'x2'}]) + +m = Spam(foo={"count": 4}, bars=[{"apple": "x1"}, {"apple": "x2"}]) MetaFoo = pydantic_to_metadata(Foo) MetaBar = pydantic_to_metadata(Bar) @@ -87,7 +95,7 @@ class Spam(BaseModel): assert isinstance(spam.foo, dlite.Instance) assert spam.foo.count == 4 assert spam.foo.size == -1 -assert spam.bars[0].apple == 'x1' -assert spam.bars[0].banana == 'y' -assert spam.bars[1].apple == 'x2' -assert spam.bars[1].banana == 'y' +assert spam.bars[0].apple == "x1" +assert spam.bars[0].banana == "y" +assert spam.bars[1].apple == "x2" +assert spam.bars[1].banana == "y" diff --git a/bindings/python/tests/test_python_bindings.py b/bindings/python/tests/test_python_bindings.py index a533dc78b..0b4926950 100755 --- a/bindings/python/tests/test_python_bindings.py +++ b/bindings/python/tests/test_python_bindings.py @@ -11,7 +11,7 @@ # Wrap tests into a unittest TestCase # This way, we can run the individual tests interactively class ScriptTestCase(unittest.TestCase): - def __init__(self, methodname='testfile', filename=None): + def __init__(self, methodname="testfile", filename=None): unittest.TestCase.__init__(self, methodname) self.filename = filename @@ -20,10 +20,10 @@ def testfile(self): env.update(__file__=self.filename) with open(self.filename) as fd: try: - exec(compile(fd.read(), self.filename, 'exec'), env) + exec(compile(fd.read(), self.filename, "exec"), env) except SystemExit as exc: if exc.code == 44: - self.skipTest('exit code 44') + self.skipTest("exit code 44") else: raise exc @@ -31,23 +31,26 @@ def id(self): return self.filename def __str__(self): - return self.filename.split('tests/')[-1] + return self.filename.split("tests/")[-1] def __repr__(self): return "ScriptTestCase(filename='%s')" % self.filename def test(verbosity=1, stream=sys.stdout): - tests = [test for test in sorted(glob(os.path.join(thisdir, 'test_*.py'))) - if not test.endswith('__.py') - and not test.endswith('test_python_bindings.py') - # Exclude test_global_dlite_state.py since the global state - # that it is testing depends on the other tests. - and not test.endswith('test_global_dlite_state.py')] + tests = [ + test + for test in sorted(glob(os.path.join(thisdir, "test_*.py"))) + if not test.endswith("__.py") + and not test.endswith("test_python_bindings.py") + # Exclude test_global_dlite_state.py since the global state + # that it is testing depends on the other tests. 
+ and not test.endswith("test_global_dlite_state.py") + ] ts = unittest.TestSuite() for test in sorted(tests): ts.addTest(ScriptTestCase(filename=os.path.abspath(test))) - with open(os.devnull, 'w') as devnull: + with open(os.devnull, "w") as devnull: if not verbosity: stream = devnull ttr = unittest.TextTestRunner(verbosity=verbosity, stream=stream) @@ -60,7 +63,7 @@ def test(verbosity=1, stream=sys.stdout): # copy stderr_fd before it is overwritten # NOTE: `copied` is inheritable on Windows when duplicating a # standard stream - with os.fdopen(os.dup(stderr_fd), 'wb') as copied: + with os.fdopen(os.dup(stderr_fd), "wb") as copied: sys.stdout.flush() sys.stderr.flush() try: @@ -77,7 +80,7 @@ def test(verbosity=1, stream=sys.stdout): if __name__ == "__main__": for k in sorted(os.environ.keys()): - for s in 'dlite', 'path', 'python': + for s in "dlite", "path", "python": if s in k.lower(): print("%35s : %-s" % (k, os.environ[k])) results = test(verbosity=2) diff --git a/bindings/python/tests/test_python_storage.py b/bindings/python/tests/test_python_storage.py index 20309907f..e0802daa1 100755 --- a/bindings/python/tests/test_python_storage.py +++ b/bindings/python/tests/test_python_storage.py @@ -27,7 +27,7 @@ HAVE_RDF = True -#thisdir = os.path.abspath(os.path.dirname(__file__)) +# thisdir = os.path.abspath(os.path.dirname(__file__)) thisdir = Path(__file__).resolve().parent @@ -47,32 +47,32 @@ def equal_rdf_files(path1, path2): # Test JSON -url = 'json://' + os.path.join(thisdir, 'Person.json') +url = "json://" + os.path.join(thisdir, "Person.json") Person = dlite.Instance.from_url(url) person = Person(dims=[2]) -person.name = 'Ada' +person.name = "Ada" person.age = 12.5 -person.skills = ['skiing', 'jumping'] +person.skills = ["skiing", "jumping"] -print('=== saving...') -with dlite.Storage('json', 'test.json', 'mode=w') as s: +print("=== saving...") +with dlite.Storage("json", "test.json", "mode=w") as s: s.save(person) -print('=== loading...', person.uuid) -with dlite.Storage('json', 'test.json', 'mode=r') as s: +print("=== loading...", person.uuid) +with dlite.Storage("json", "test.json", "mode=r") as s: inst = s.load(id=person.uuid) person2 = Person(dims=[3]) -person2.name = 'Berry' +person2.name = "Berry" person2.age = 24.3 -person2.skills = ['eating', 'sleeping', 'reading'] -with dlite.Storage('json://test.json') as s: +person2.skills = ["eating", "sleeping", "reading"] +with dlite.Storage("json://test.json") as s: s.save(person2) -s = dlite.Storage('json://test.json') +s = dlite.Storage("json://test.json") uuids = s.get_uuids() del s del uuids @@ -80,72 +80,72 @@ def equal_rdf_files(path1, path2): # ===================================================================== # Test the BSON and YAML Python plugins -input_dir = thisdir.parent.parent.parent / 'storages/python/tests-python/input' +input_dir = thisdir.parent.parent.parent / "storages/python/tests-python/input" if HAVE_BSON: # Test BSON - print('\n\n=== Test BSON plugin ===') - meta_file = input_dir / 'test_meta.bson' - meta_test_file = input_dir / 'test_meta_save.bson' - data_file = input_dir / 'test_data.bson' - data_test_file = input_dir / 'test_data_save.bson' - - print('Test loading metadata...') - with dlite.Storage('bson', meta_file, 'mode=r') as s: - meta = s.load('2b10c236-eb00-541a-901c-046c202e52fa') - print('...Loading metadata ok!') - - print('Test saving metadata...') - with dlite.Storage('bson', meta_test_file, 'mode=w') as s: + print("\n\n=== Test BSON plugin ===") + meta_file = input_dir / "test_meta.bson" + 
meta_test_file = input_dir / "test_meta_save.bson" + data_file = input_dir / "test_data.bson" + data_test_file = input_dir / "test_data_save.bson" + + print("Test loading metadata...") + with dlite.Storage("bson", meta_file, "mode=r") as s: + meta = s.load("2b10c236-eb00-541a-901c-046c202e52fa") + print("...Loading metadata ok!") + + print("Test saving metadata...") + with dlite.Storage("bson", meta_test_file, "mode=w") as s: s.save(meta) - with dlite.Storage('bson', meta_test_file, 'mode=r') as s: - inst2 = s.load('2b10c236-eb00-541a-901c-046c202e52fa') + with dlite.Storage("bson", meta_test_file, "mode=r") as s: + inst2 = s.load("2b10c236-eb00-541a-901c-046c202e52fa") if meta == inst2: - print('...Saving metadata ok!') + print("...Saving metadata ok!") else: - raise ValueError('...Saving metadata failed!') + raise ValueError("...Saving metadata failed!") os.remove(meta_test_file) del meta, inst2 - print('Test loading data...') - with dlite.Storage('bson', data_file, 'mode=r') as s: - inst1 = s.load('204b05b2-4c89-43f4-93db-fd1cb70f54ef') - inst2 = s.load('e076a856-e36e-5335-967e-2f2fd153c17d') - print('...Loading data ok!') + print("Test loading data...") + with dlite.Storage("bson", data_file, "mode=r") as s: + inst1 = s.load("204b05b2-4c89-43f4-93db-fd1cb70f54ef") + inst2 = s.load("e076a856-e36e-5335-967e-2f2fd153c17d") + print("...Loading data ok!") - print('Test saving data...') - with dlite.Storage('bson', data_test_file, 'mode=w') as s: + print("Test saving data...") + with dlite.Storage("bson", data_test_file, "mode=w") as s: s.save(inst1) s.save(inst2) - with dlite.Storage('bson', data_test_file, 'mode=r') as s: - inst3 = s.load('204b05b2-4c89-43f4-93db-fd1cb70f54ef') - inst4 = s.load('e076a856-e36e-5335-967e-2f2fd153c17d') + with dlite.Storage("bson", data_test_file, "mode=r") as s: + inst3 = s.load("204b05b2-4c89-43f4-93db-fd1cb70f54ef") + inst4 = s.load("e076a856-e36e-5335-967e-2f2fd153c17d") if inst1 == inst3 and inst2 == inst4: - print('...Saving data ok!') + print("...Saving data ok!") else: - raise ValueError('...Saving data failed!') + raise ValueError("...Saving data failed!") os.remove(data_test_file) - #del inst1, inst2, inst3, inst4 + # del inst1, inst2, inst3, inst4 else: - print('Skip testing BSON plugin - bson not installed') + print("Skip testing BSON plugin - bson not installed") if HAVE_YAML: # Test YAML - print('\n\n=== Test YAML plugin ===') - meta_file = input_dir / 'test_meta_soft7.yaml' - meta_test_file = input_dir / 'test_meta_save.yaml' - data_file = input_dir / 'test_data.yaml' - data_test_file = input_dir / 'test_data_save.yaml' - - print('Test loading metadata...') - with dlite.Storage('yaml', meta_file, 'mode=r') as s: - #meta = s.load('d9910bde-6028-524c-9e0f-e8f0db734bc8') - meta = s.load('http://onto-ns.com/meta/0.1/TestEntity') - print('...Loading metadata ok!') - - print('Test saving metadata...') - with dlite.Storage('yaml', meta_test_file, 'mode=w') as s: + print("\n\n=== Test YAML plugin ===") + meta_file = input_dir / "test_meta_soft7.yaml" + meta_test_file = input_dir / "test_meta_save.yaml" + data_file = input_dir / "test_data.yaml" + data_test_file = input_dir / "test_data_save.yaml" + + print("Test loading metadata...") + with dlite.Storage("yaml", meta_file, "mode=r") as s: + # meta = s.load('d9910bde-6028-524c-9e0f-e8f0db734bc8') + meta = s.load("http://onto-ns.com/meta/0.1/TestEntity") + print("...Loading metadata ok!") + + print("Test saving metadata...") + with dlite.Storage("yaml", meta_test_file, "mode=w") as s: s.save(meta) with 
open(meta_file, "r") as f: d1 = pyyaml.safe_load(f) @@ -153,17 +153,17 @@ def equal_rdf_files(path1, path2): d2 = pyyaml.safe_load(f) assert d1 == d2 - print('...Saving metadata ok!') + print("...Saving metadata ok!") os.remove(meta_test_file) - print('Test loading data...') - with dlite.Storage('yaml', data_file, 'mode=r') as s: - inst1 = s.load('52522ba5-6bfe-4a64-992d-e9ec4080fbac') - inst2 = s.load('2f8ba28c-add6-5718-a03c-ea46961d6ca7') - print('...Loading data ok!') + print("Test loading data...") + with dlite.Storage("yaml", data_file, "mode=r") as s: + inst1 = s.load("52522ba5-6bfe-4a64-992d-e9ec4080fbac") + inst2 = s.load("2f8ba28c-add6-5718-a03c-ea46961d6ca7") + print("...Loading data ok!") - print('Test saving data...') - with dlite.Storage('yaml', data_test_file, 'mode=w') as s: + print("Test saving data...") + with dlite.Storage("yaml", data_test_file, "mode=w") as s: s.save(inst1) s.save(inst2) with open(data_file, "r") as f: @@ -171,51 +171,51 @@ def equal_rdf_files(path1, path2): with open(data_test_file, "r") as f: d2 = pyyaml.safe_load(f) assert d1 == d2 - print('...Saving data ok!') + print("...Saving data ok!") os.remove(data_test_file) del inst1, inst2 else: - print('Skip testing YAML plugin - PyYAML not installed') + print("Skip testing YAML plugin - PyYAML not installed") if HAVE_RDF: # Test RDF - print('\n\n=== Test RDF plugin ===') - meta_file = input_dir / 'test_meta.ttl' - meta_test_file = meta_file.with_name(meta_file.stem + '_save.ttl') - data_file = input_dir / 'test_data.ttl' - data_test_file = data_file.with_name(data_file.stem + '_save.ttl') - - print('Test loading metadata...') - with dlite.Storage('pyrdf', meta_file, 'mode=r') as s: - meta = s.load('http://onto-ns.com/meta/0.2/myentity') - print('...Loading metadata ok!') - - print('Test saving metadata...') - with dlite.Storage('pyrdf', meta_test_file, 'mode=w') as s: + print("\n\n=== Test RDF plugin ===") + meta_file = input_dir / "test_meta.ttl" + meta_test_file = meta_file.with_name(meta_file.stem + "_save.ttl") + data_file = input_dir / "test_data.ttl" + data_test_file = data_file.with_name(data_file.stem + "_save.ttl") + + print("Test loading metadata...") + with dlite.Storage("pyrdf", meta_file, "mode=r") as s: + meta = s.load("http://onto-ns.com/meta/0.2/myentity") + print("...Loading metadata ok!") + + print("Test saving metadata...") + with dlite.Storage("pyrdf", meta_test_file, "mode=w") as s: s.save(meta) assert equal_rdf_files(meta_file, meta_test_file) - print('...Saving metadata ok!') + print("...Saving metadata ok!") os.remove(meta_test_file) from dlite.rdf import DM, PUBLIC_ID, from_rdf import rdflib from rdflib import URIRef, Literal - print('Test loading data...') - with dlite.Storage('pyrdf', data_file, 'mode=r') as s: - inst1 = s.load('inst_with_uri') - #inst1 = s.load('2713c649-e9b1-5f5e-8abb-8a6e3e610a61') - inst2 = s.load('67128279-c3fa-4483-8842-eb571f94a1ae') - print('...Loading data ok!') + print("Test loading data...") + with dlite.Storage("pyrdf", data_file, "mode=r") as s: + inst1 = s.load("inst_with_uri") + # inst1 = s.load('2713c649-e9b1-5f5e-8abb-8a6e3e610a61') + inst2 = s.load("67128279-c3fa-4483-8842-eb571f94a1ae") + print("...Loading data ok!") - print('Test saving data...') - with dlite.Storage('pyrdf', data_test_file, 'mode=w') as s: + print("Test saving data...") + with dlite.Storage("pyrdf", data_test_file, "mode=w") as s: s.save(inst1) s.save(inst2) assert equal_rdf_files(data_file, data_test_file) - print('...Saving data ok!') + print("...Saving data ok!") 
os.remove(data_test_file) del inst1, inst2 else: - print('Skip testing RDF plugin - rdflib not installed') + print("Skip testing RDF plugin - rdflib not installed") diff --git a/bindings/python/tests/test_rdf.py b/bindings/python/tests/test_rdf.py index 19d86261a..ab35e88f5 100644 --- a/bindings/python/tests/test_rdf.py +++ b/bindings/python/tests/test_rdf.py @@ -4,6 +4,7 @@ import rdflib except ImportError: import sys + sys.exit(44) import dlite @@ -16,8 +17,12 @@ inst = from_rdf(thisdir / "rdf.ttl", id=id) # Serialise `inst` to string `s` -s = to_rdf(inst, base_uri="http://onto-ns.com/data#", - base_prefix="onto", include_meta=True) +s = to_rdf( + inst, + base_uri="http://onto-ns.com/data#", + base_prefix="onto", + include_meta=True, +) # Check that content matches original serialisation with open(thisdir / "rdf.ttl", "r") as f: diff --git a/bindings/python/tests/test_ref_type.py b/bindings/python/tests/test_ref_type.py index 59c155f90..d970c2f3e 100644 --- a/bindings/python/tests/test_ref_type.py +++ b/bindings/python/tests/test_ref_type.py @@ -49,7 +49,8 @@ assert item1.next.next.next == item1 # one cycle # Create from json -inst = dlite.Instance.from_json(f""" +inst = dlite.Instance.from_json( + f""" {{ "meta": "http://onto-ns.com/meta/0.1/Linked", "dimensions": {{}}, @@ -57,7 +58,8 @@ "next": "{item0.uuid}" }} }} -""") +""" +) assert inst.next == item0 assert inst.next.next.next.next == item0 @@ -77,4 +79,4 @@ # Instantiate nested from dict # For issue #515 -#middle = Middle(properties={"name": "nested", "leaf": {"a": 1, "b": True}}) +# middle = Middle(properties={"name": "nested", "leaf": {"a": 1, "b": True}}) diff --git a/bindings/python/tests/test_storage.py b/bindings/python/tests/test_storage.py index 43f972d4c..4963e0706 100755 --- a/bindings/python/tests/test_storage.py +++ b/bindings/python/tests/test_storage.py @@ -6,6 +6,7 @@ try: import pytest + HAVE_PYTEST = True except ModuleNotFoundError: HAVE_PYTEST = False @@ -19,35 +20,37 @@ thisdir = os.path.abspath(os.path.dirname(__file__)) -url = 'json://' + thisdir + '/MyEntity.json' +url = "json://" + thisdir + "/MyEntity.json" # Load metadata (i.e. 
an instance of meta-metadata) from url
 s = dlite.Storage(url)
 myentity = dlite.Instance.from_storage(
-    s, 'http://onto-ns.com/meta/0.1/MyEntity')
+    s, "http://onto-ns.com/meta/0.1/MyEntity"
+)
 del s
 with dlite.Storage(url) as s2:
     myentity2 = dlite.Instance.from_storage(
-        s2, 'http://onto-ns.com/meta/0.1/MyEntity')
+        s2, "http://onto-ns.com/meta/0.1/MyEntity"
+    )
 
 # Create an instance
-inst = myentity(dimensions=[2, 3], id='my-data')
-inst['a-bool-array'] = True, False
+inst = myentity(dimensions=[2, 3], id="my-data")
+inst["a-bool-array"] = True, False
 
 # Test Storage.save()
-with dlite.Storage('json', 'tmp.json', 'mode=w') as s:
+with dlite.Storage("json", "tmp.json", "mode=w") as s:
     s.save(inst)
 
 # Test json
-print('--- testing json')
-myentity.save('json://myentity.json?mode=w')
-inst.save('json://inst.json?mode=w')
+print("--- testing json")
+myentity.save("json://myentity.json?mode=w")
+inst.save("json://inst.json?mode=w")
 del inst
-inst = dlite.Instance.from_url(f'json://{thisdir}/inst.json#my-data')
+inst = dlite.Instance.from_url(f"json://{thisdir}/inst.json#my-data")
 
 
 # Test yaml
@@ -55,7 +58,7 @@
     print('--- testing yaml')
     inst.save('yaml://inst.yaml?mode=w')
     del inst
-    inst = dlite.Instance.from_url('yaml://inst.yaml#my-data')
+    inst = dlite.Instance.from_url("yaml://inst.yaml#my-data")
 
     # test help()
     expected = """\
@@ -74,7 +77,7 @@
     - `single`: Whether the input is assumed to be in single-entity form.
       If "auto" (default) the form will be inferred automatically.
     """
-    s = dlite.Storage('yaml', 'inst.yaml', options='mode=a')
+    s = dlite.Storage("yaml", "inst.yaml", options="mode=a")
     assert s.help().strip() == expected.strip()
 
     # Test delete()
@@ -83,32 +86,31 @@
     assert len(s.get_uuids()) == 0
 
     # Test to_bytes()/from_bytes()
-    data = inst.to_bytes('yaml')
-    data2 = data.replace(b'uri: my-data', b'uri: my-data2')
-    inst2 = dlite.Instance.from_bytes('yaml', data2)
+    data = inst.to_bytes("yaml")
+    data2 = data.replace(b"uri: my-data", b"uri: my-data2")
+    inst2 = dlite.Instance.from_bytes("yaml", data2)
     assert inst2.uuid != inst.uuid
     assert inst2.get_hash() == inst.get_hash()
     s.flush()  # avoid calling flush() when the interpreter is torn down
 
-
 # Test rdf
 try:
-    print('--- testing rdf')
-    inst.save('rdf:db.xml?mode=w;store=file;filename=inst.ttl;format=turtle')
+    print("--- testing rdf")
+    inst.save("rdf:db.xml?mode=w;store=file;filename=inst.ttl;format=turtle")
 except dlite.DLiteError:
-    print(' skipping rdf test')
+    print(" skipping rdf test")
 else:
-    #del inst
+    # del inst
     # FIXME: read from inst.ttl not db.xml
-    inst3 = dlite.Instance.from_url('rdf://db.xml#my-data')
+    inst3 = dlite.Instance.from_url("rdf://db.xml#my-data")
 
 
 # Tests for issue #587
 if HAVE_YAML:
     bytearr = inst.to_bytes("yaml")
-#print(bytes(bytearr).decode())
+    # print(bytes(bytearr).decode())
 
 if HAVE_PYTEST:
     with pytest.raises(dlite.DLiteError):
         inst.to_bytes("json")
diff --git a/bindings/python/tests/test_storage_path.py b/bindings/python/tests/test_storage_path.py
index 2144ad5ca..3b8691ea9 100644
--- a/bindings/python/tests/test_storage_path.py
+++ b/bindings/python/tests/test_storage_path.py
@@ -7,8 +7,8 @@
 # Configure paths
 thisdir = Path(__file__).parent.absolute()
 
-dlite.storage_path.append(thisdir / '*.txt')
-dlite.storage_path.append(thisdir / '*.yaml')
-dlite.storage_path.append(thisdir / 'entities' / '*.json')
-Person = dlite.get_instance('http://onto-ns.com/meta/0.1/Person')
+dlite.storage_path.append(thisdir / "*.txt")
+dlite.storage_path.append(thisdir / "*.yaml")
+dlite.storage_path.append(thisdir / 
"entities" / "*.json") +Person = dlite.get_instance("http://onto-ns.com/meta/0.1/Person") print(Person) diff --git a/bindings/python/tests/test_storage_plugins.py b/bindings/python/tests/test_storage_plugins.py new file mode 100644 index 000000000..31c32d8fb --- /dev/null +++ b/bindings/python/tests/test_storage_plugins.py @@ -0,0 +1,22 @@ +import dlite + + +# Storage plugins can be loaded +dlite.Storage.load_plugins() + +# Now plugins are loaded +plugins = set(dlite.StoragePluginIter()) +assert plugins +assert "json" in plugins + +# Plugins can be iterated over +print("Storage plugins") +plugins2 = set() +for name in dlite.StoragePluginIter(): + print(" -", name) + plugins2.add(name) +assert plugins2 == plugins + +# Unload json plugin +dlite.Storage.unload_plugin("json") +assert "json" not in set(dlite.StoragePluginIter()) diff --git a/bindings/python/tests/test_transaction.py b/bindings/python/tests/test_transaction.py index 73c5f27ea..50092456f 100644 --- a/bindings/python/tests/test_transaction.py +++ b/bindings/python/tests/test_transaction.py @@ -6,8 +6,8 @@ # Configure paths thisdir = Path(__file__).parent.absolute() -dlite.storage_path.append(thisdir / '*.json') -Person = dlite.get_instance('http://onto-ns.com/meta/0.1/Person') +dlite.storage_path.append(thisdir / "*.json") +Person = dlite.get_instance("http://onto-ns.com/meta/0.1/Person") person = Person(dims={"N": 4}) person.name = "Knud H. Thomsen" @@ -61,7 +61,7 @@ inst.snapshot() inst.age = 55 for i in range(6): - assert inst.get_snapshot(i).age == 55 - i*5 + assert inst.get_snapshot(i).age == 55 - i * 5 for i in range(4): assert inst.get_snapshot(i + 2).age == person.get_snapshot(3 + i).age @@ -81,5 +81,5 @@ db.unlink() # Make sure that the db is empty with dlite.Storage("json", db, "mode=w") as storage: - #person.push_snapshot(storage, 1) + # person.push_snapshot(storage, 1) storage.save(person) diff --git a/bindings/python/tests/test_utils.py b/bindings/python/tests/test_utils.py index e7dca122c..09ae06073 100644 --- a/bindings/python/tests/test_utils.py +++ b/bindings/python/tests/test_utils.py @@ -5,126 +5,74 @@ import dlite from dlite.utils import ( - instance_from_dict, to_metadata, infer_dimensions, - HAVE_DATACLASSES, HAVE_PYDANTIC - ) + instance_from_dict, + to_metadata, + infer_dimensions, + HAVE_DATACLASSES, + HAVE_PYDANTIC, +) thisdir = Path(__file__).absolute().parent -with open(thisdir / 'Person.json', 'rt') as f: +with open(thisdir / "Person.json", "rt") as f: d = json.load(f) Person = dlite.utils.instance_from_dict(d) person = Person([2]) -person.name = 'Ada' +person.name = "Ada" person.age = 12.5 -person.skills = ['skiing', 'jumping'] +person.skills = ["skiing", "jumping"] d1 = person.asdict() inst1 = instance_from_dict(d1) assert inst1.uuid == person.uuid assert inst1.meta.uuid == Person.uuid -assert inst1.name == 'Ada' +assert inst1.name == "Ada" assert inst1.age == 12.5 -assert all(inst1.skills == ['skiing', 'jumping']) +assert all(inst1.skills == ["skiing", "jumping"]) d2 = Person.asdict() inst2 = instance_from_dict(d2) assert inst2.uuid == Person.uuid -dlite.storage_path.append(thisdir / '*.json') +dlite.storage_path.append(thisdir / "*.json") d = { "uuid": "7ee0f569-1355-4eed-a2f7-0fc31378d56c", "meta": "http://onto-ns.com/meta/0.1/MyEntity", - "dimensions": { - "N": 2, - "M": 3 - }, + "dimensions": {"N": 2, "M": 3}, "properties": { "a-blob": "00112233445566778899aabbccddeeff", - "a-blob-array": [ - [ - b"abcd", - b"efgh" - ], - [ - b"ijkl", - b"mnop" - ] - ], + "a-blob-array": [[b"abcd", b"efgh"], 
[b"ijkl", b"mnop"]], "a-bool": True, - "a-bool-array": [ - False, - True - ], + "a-bool-array": [False, True], "an-int": 42, - "an-int-array": [ - -1, - -2, - -3 - ], + "an-int-array": [-1, -2, -3], "a-float": 3.14, - "a-float64-array": [ - 0.0, - 1.6022e-19, - 6.022e23 - ], + "a-float64-array": [0.0, 1.6022e-19, 6.022e23], "a-fixstring": "fix", - "a-fixstring-array": [ - [ - "one", - "two" - ], - [ - "three", - "four" - ] - ], + "a-fixstring-array": [["one", "two"], ["three", "four"]], "a-string": None, - "a-string-array": [ - [ - None, - "a string", - None - ], - [ - None, - None, - None - ] - ], - "a-relation": { - "s": "a-subject", - "p": "a-predicate", - "o": "a-object" - }, + "a-string-array": [[None, "a string", None], [None, None, None]], + "a-relation": {"s": "a-subject", "p": "a-predicate", "o": "a-object"}, "a-relation-array": [ - { - "s": "a1", - "p": "b1", - "o": "c1" - }, - { - "s": "a2", - "p": "b2", - "o": "c2" - } - ] - } + {"s": "a1", "p": "b1", "o": "c1"}, + {"s": "a2", "p": "b2", "o": "c2"}, + ], + }, } inst = instance_from_dict(d) print(inst) -url = 'json://' + os.path.join(thisdir, 'Person.json') +url = "json://" + os.path.join(thisdir, "Person.json") Person = dlite.Instance.from_url(url) person = Person([2]) -person.name = 'Ada' +person.name = "Ada" person.age = 12.5 -person.skills = ['skiing', 'jumping'] +person.skills = ["skiing", "jumping"] d1 = person.asdict() inst1 = instance_from_dict(d1) @@ -138,23 +86,23 @@ EntitySchema = get_dataclass_entity_schema() AtomsEntity = EntitySchema( - uri='http://onto-ns.com/meta/0.1/AtomsEntity', - description='A structure consisting of a set of atoms.', + uri="http://onto-ns.com/meta/0.1/AtomsEntity", + description="A structure consisting of a set of atoms.", dimensions={ - 'natoms': 'Number of atoms.', - 'ncoords': 'Number of coordinates. Always three.', + "natoms": "Number of atoms.", + "ncoords": "Number of coordinates. Always three.", }, properties={ - 'symbols': { - 'type': 'string', - 'dims': ['natoms'], - 'description': 'Chemical symbol of each atom.', + "symbols": { + "type": "string", + "dims": ["natoms"], + "description": "Chemical symbol of each atom.", }, - 'positions': { - 'type': 'float', - 'dims': ['natoms', 'ncoords'], - 'unit': 'Å', - 'description': 'Position of each atom.', + "positions": { + "type": "float", + "dims": ["natoms", "ncoords"], + "unit": "Å", + "description": "Position of each atom.", }, }, ) @@ -168,23 +116,23 @@ PydanticEntitySchema = get_pydantic_entity_schema() AtomsEntity2 = PydanticEntitySchema( - uri='http://onto-ns.com/meta/0.1/AtomsEntity', - description='A structure consisting of a set of atoms.', + uri="http://onto-ns.com/meta/0.1/AtomsEntity", + description="A structure consisting of a set of atoms.", dimensions={ - 'natoms': 'Number of atoms.', - 'ncoords': 'Number of coordinates. Always three.', + "natoms": "Number of atoms.", + "ncoords": "Number of coordinates. 
Always three.", }, properties={ - 'symbols': { - 'type': 'string', - 'dims': ['natoms'], - 'description': 'Chemical symbol of each atom.', + "symbols": { + "type": "string", + "dims": ["natoms"], + "description": "Chemical symbol of each atom.", }, - 'positions': { - 'type': 'float', - 'dims': ['natoms', 'ncoords'], - 'unit': 'Å', - 'description': 'Position of each atom.', + "positions": { + "type": "float", + "dims": ["natoms", "ncoords"], + "unit": "Å", + "description": "Position of each atom.", }, }, ) @@ -197,21 +145,28 @@ # TODO - test also exceptions dims = infer_dimensions( meta=inst.meta, - values={'a-string-array': [('a', 'b'), ('c', 'd'), ('e', 'f')]}, + values={"a-string-array": [("a", "b"), ("c", "d"), ("e", "f")]}, ) assert dims == dict(N=3, M=2) dims = infer_dimensions( meta=inst.meta, - values={'a-string-array': [('a', 'b'), ('c', 'd'), ('e', 'f')], - 'a-fixstring-array': [ - ('a', 'b', 'c'), ('a', 'b', 'c'), ('a', 'b', 'c')]}, + values={ + "a-string-array": [("a", "b"), ("c", "d"), ("e", "f")], + "a-fixstring-array": [ + ("a", "b", "c"), + ("a", "b", "c"), + ("a", "b", "c"), + ], + }, ) assert dims == dict(N=3, M=2) dims = infer_dimensions( meta=inst.meta, - values={'an-int-array': [1, 2, 3, 4], - 'a-fixstring-array': [('Al', 'Mg'), ('Si', 'Cu')]}, + values={ + "an-int-array": [1, 2, 3, 4], + "a-fixstring-array": [("Al", "Mg"), ("Si", "Cu")], + }, ) assert dims == dict(N=2, M=4) diff --git a/bindings/python/utils.py b/bindings/python/utils.py index 7b27e100d..0f1704fc5 100644 --- a/bindings/python/utils.py +++ b/bindings/python/utils.py @@ -31,6 +31,7 @@ class MissingDependencyError(dlite.DLiteError): pip install """ + exit_code = 44 @@ -55,6 +56,7 @@ def uncaught_exception_hook(exetype, value, trace): if hasattr(value, "exit_code"): sys.exit(value.exit_code) + sys.excepthook, oldhook = uncaught_exception_hook, sys.excepthook @@ -83,49 +85,59 @@ def instance_from_dict(d, id=None, single=None, check_storages=True): Whether to check if the instance already exists in storages specified in `dlite.storage_path`. """ - if single is None or single == 'auto': - single = True if 'properties' in d else False + if single is None or single == "auto": + single = True if "properties" in d else False if single: - if not id and 'uuid' not in d and 'uri' not in d: - if 'namespace' in d and 'version' in d and 'name' in d: + if not id and "uuid" not in d and "uri" not in d: + if "namespace" in d and "version" in d and "name" in d: id = f"{d['namespace']}/{d['version']}/{d['name']}" else: - raise ValueError('`id` required for dicts in single-entry ' - 'form with no explicit uuid or uri.') + raise ValueError( + "`id` required for dicts in single-entry " + "form with no explicit uuid or uri." + ) else: if not id: if len(d) == 1: - id, = d.keys() + (id,) = d.keys() else: - raise ValueError('`id` required for dicts in multi-entry form.') + raise ValueError( + "`id` required for dicts in multi-entry form." 
+ ) if id in d: - return instance_from_dict(d[id], id=id, single=True, - check_storages=check_storages) + return instance_from_dict( + d[id], id=id, single=True, check_storages=check_storages + ) else: uuid = dlite.get_uuid(id) if uuid in d: - return instance_from_dict(d[uuid], id=id, single=True, - check_storages=check_storages) + return instance_from_dict( + d[uuid], id=id, single=True, check_storages=check_storages + ) else: - raise ValueError(f'no such id in dict: {id}') + raise ValueError(f"no such id in dict: {id}") - if 'uri' in d or 'uuid' in d: - if 'uri' in d and 'uuid' in d: - if dlite.get_uuid(d['uri']) != d['uuid']: - raise dlite.DLiteError('uri and uuid in dict are not consistent') - uuid = dlite.get_uuid(str(d.get('uuid', d.get('uri')))) + if "uri" in d or "uuid" in d: + if "uri" in d and "uuid" in d: + if dlite.get_uuid(d["uri"]) != d["uuid"]: + raise dlite.DLiteError( + "uri and uuid in dict are not consistent" + ) + uuid = dlite.get_uuid(str(d.get("uuid", d.get("uri")))) if id: if dlite.get_uuid(id) != uuid: - raise ValueError(f'`id` is not consistent with uri/uuid in dict') + raise ValueError( + f"`id` is not consistent with uri/uuid in dict" + ) - meta = dlite.get_instance(d.get('meta', dlite.ENTITY_SCHEMA)) + meta = dlite.get_instance(d.get("meta", dlite.ENTITY_SCHEMA)) if meta.is_metameta: - if 'uri' in d: - uri = d['uri'] + if "uri" in d: + uri = d["uri"] else: - uri = dlite.join_meta_uri(d['name'], d['version'], d['namespace']) + uri = dlite.join_meta_uri(d["name"], d["version"], d["namespace"]) if check_storages: try: @@ -136,48 +148,59 @@ def instance_from_dict(d, id=None, single=None, check_storages=True): except dlite.DLiteError: pass - if isinstance(d['dimensions'], Sequence): - dimensions = [dlite.Dimension(d['name'], d.get('description')) - for d in d['dimensions']] - elif isinstance(d['dimensions'], Mapping): - dimensions = [dlite.Dimension(k, v) - for k, v in d['dimensions'].items()] + if isinstance(d["dimensions"], Sequence): + dimensions = [ + dlite.Dimension(d["name"], d.get("description")) + for d in d["dimensions"] + ] + elif isinstance(d["dimensions"], Mapping): + dimensions = [ + dlite.Dimension(k, v) for k, v in d["dimensions"].items() + ] else: raise TypeError( - "`dimensions` must be either a sequence or a mapping") + "`dimensions` must be either a sequence or a mapping" + ) props = [] - if isinstance(d['properties'], Sequence): - for p in d['properties']: - props.append(dlite.Property( - name=p['name'], - type=p['type'], - dims=p.get('shape', p.get('dims')), - unit=p.get('unit'), - description=p.get('description'), - )) - elif isinstance(d['properties'], Mapping): - for k, v in d['properties'].items(): - props.append(dlite.Property( - name = k, - type = v['type'], - dims=v.get('shape', v.get('dims')), - unit=v.get('unit'), - description=v.get('description'), - )) + if isinstance(d["properties"], Sequence): + for p in d["properties"]: + props.append( + dlite.Property( + name=p["name"], + type=p["type"], + dims=p.get("shape", p.get("dims")), + unit=p.get("unit"), + description=p.get("description"), + ) + ) + elif isinstance(d["properties"], Mapping): + for k, v in d["properties"].items(): + props.append( + dlite.Property( + name=k, + type=v["type"], + dims=v.get("shape", v.get("dims")), + unit=v.get("unit"), + description=v.get("description"), + ) + ) else: raise TypeError( - "`properties` must be either a sequence or a mapping") + "`properties` must be either a sequence or a mapping" + ) - inst = dlite.Instance.create_metadata(uri, dimensions, 
props, - d.get('description')) + inst = dlite.Instance.create_metadata( + uri, dimensions, props, d.get("description") + ) else: - dims = [d['dimensions'][dim.name] - for dim in meta.properties['dimensions']] - inst_id = d.get('uri', d.get('uuid', id)) + dims = [ + d["dimensions"][dim.name] for dim in meta.properties["dimensions"] + ] + inst_id = d.get("uri", d.get("uuid", id)) inst = dlite.Instance.from_metaid(meta.uri, dims=dims, id=inst_id) - for p in meta['properties']: - value = d['properties'][p.name] + for p in meta["properties"]: + value = d["properties"][p.name] inst[p.name] = value return inst @@ -207,8 +230,10 @@ def to_metadata(obj): ) d = obj.dict() else: - raise TypeError('obj can be dict, json string, dataclasses instance ' - f'or pydantic instance. Got {type(obj)}') + raise TypeError( + "obj can be dict, json string, dataclasses instance " + f"or pydantic instance. Got {type(obj)}" + ) return instance_from_dict(d) @@ -221,7 +246,7 @@ def get_dataclass_entity_schema(): @dataclass class Property: type: str - #@ref: Optional[str] # Should we rename this to "ref"? See issue #595 + # @ref: Optional[str] # Should we rename this to "ref"? See issue #595 shape: Optional[List[str]] unit: Optional[str] description: Optional[str] @@ -237,11 +262,11 @@ class EntitySchema: def pydantic_to_property( - name: str, - propdict: dict, - dimensions: "Optional[dict]" = None, - namespace: str = "http://onto-ns.com/meta", - version: str = "0.1", + name: str, + propdict: dict, + dimensions: "Optional[dict]" = None, + namespace: str = "http://onto-ns.com/meta", + version: str = "0.1", ): """Return a dlite property from a name and a pydantic property dict. @@ -262,8 +287,9 @@ def pydantic_to_property( raise MissingDependencyError("pydantic") # Map simple pydantic types to corresponding dlite types - simple_types = dict(boolean="bool", integer="int64", number="float64", - string="string") + simple_types = dict( + boolean="bool", integer="int64", number="float64", string="string" + ) if dimensions is None: dimensions = {} @@ -314,12 +340,16 @@ def pydantic_to_property( for dim in shape: dimensions.setdefault(dim, f"Number of {dim}.") return dlite.Property( - name, subprop.type, ref=subprop.ref, dims=shape, - unit=unit, description=descr, + name, + subprop.type, + ref=subprop.ref, + dims=shape, + unit=unit, + description=descr, ) if ptype == "ref": - refname = propdict['$ref'].rsplit('/', 1)[-1] + refname = propdict["$ref"].rsplit("/", 1)[-1] ref = f"{namespace}/{version}/{refname}" prop = dlite.Property( name, "ref", ref=ref, unit=unit, description=descr @@ -330,11 +360,11 @@ def pydantic_to_property( def pydantic_to_metadata( - model, - uri=None, - default_namespace="http://onto-ns.com/meta", - default_version="0.1", - metaid=dlite.ENTITY_SCHEMA, + model, + uri=None, + default_namespace="http://onto-ns.com/meta", + default_version="0.1", + metaid=dlite.ENTITY_SCHEMA, ): """Create a new dlite metadata from a pydantic model. 
@@ -359,14 +389,20 @@ def pydantic_to_metadata( dimensions = {} properties = [] for name, descr in d["properties"].items(): - properties.append(pydantic_to_property( - name, descr, dimensions, default_namespace, default_version)) + properties.append( + pydantic_to_property( + name, descr, dimensions, default_namespace, default_version + ) + ) dims = [dlite.Dimension(k, v) for k, v in dimensions.items()] return dlite.Instance.create_metadata( - uri, dims, properties, + uri, + dims, + properties, d.get("description", ""), ) + def pydantic_to_instance(meta, pydinst): """Return a new dlite instance from a pydantic instance `pydinst`.""" if not HAVE_PYDANTIC: @@ -378,7 +414,7 @@ def pydantic_to_instance(meta, pydinst): inst = meta(dimensions) def getval(p, v): - if p.type == 'ref': + if p.type == "ref": if dlite.has_instance(p.ref): submeta = dlite.get_instance(p.ref) return pydantic_to_instance(submeta, v) @@ -396,6 +432,7 @@ def getval(p, v): return inst + def get_pydantic_entity_schema(): """Returns the datamodel for dataclasses in Python standard library.""" if not HAVE_PYDANTIC: @@ -419,7 +456,7 @@ class EntitySchema(BaseModel): def get_package_paths(): """Returns a dict with all the DLite builtin path variables.""" - return {k: v for k, v in dlite.__dict__.items() if k.endswith('_path')} + return {k: v for k, v in dlite.__dict__.items() if k.endswith("_path")} def infer_dimensions(meta, values, strict=True): @@ -446,37 +483,42 @@ def infer_dimensions(meta, values, strict=True): if strict: propnames = {propname for propname in values.keys()} extra_props = propnames.difference( - {prop.name for prop in meta['properties']}) + {prop.name for prop in meta["properties"]} + ) if extra_props: raise CannotInferDimensionError( - f'invalid property names in `values`: {extra_props}') + f"invalid property names in `values`: {extra_props}" + ) dims = {} - for prop in meta['properties']: + for prop in meta["properties"]: if prop.name in values and prop.ndims: with warnings.catch_warnings(): warnings.filterwarnings( "ignore", message="The unit of the quantity is stripped when " - "downcasting to ndarray." + "downcasting to ndarray.", ) v = np.array(values[prop.name]) if len(v.shape) != prop.ndims: raise InvalidNumberOfDimensionsError( - f'property {prop.name} has {prop.ndims} dimensions, but ' - f'{len(v.shape)} was provided') + f"property {prop.name} has {prop.ndims} dimensions, but " + f"{len(v.shape)} was provided" + ) for i, dimname in enumerate(prop.shape): if dimname in dims and v.shape[i] != dims[dimname]: raise CannotInferDimensionError( f'inconsistent assignment of dimension "{dimname}" ' - f'when checking property "{prop.name}"') + f'when checking property "{prop.name}"' + ) dims[dimname] = v.shape[i] - dimnames = {d.name for d in meta['dimensions']} - if len(dims) != len(meta['dimensions']): + dimnames = {d.name for d in meta["dimensions"]} + if len(dims) != len(meta["dimensions"]): missing_dims = dimnames.difference(dims.keys()) raise CannotInferDimensionError( - f'insufficient number of properties provided to infer dimensions: ' - f'{missing_dims}') + f"insufficient number of properties provided to infer dimensions: " + f"{missing_dims}" + ) return dims diff --git a/src/dlite-storage-plugins.h b/src/dlite-storage-plugins.h index 2ea6f8f56..4523ea8aa 100644 --- a/src/dlite-storage-plugins.h +++ b/src/dlite-storage-plugins.h @@ -272,7 +272,7 @@ typedef int (*Flush)(DLiteStorage *s); Returns a malloc'ed string with plugin documentation or NULL on error. Optional. 
*/ -typedef char *(*Help)(DLiteStorage *s); +typedef char *(*Help)(const DLiteStoragePlugin *api); /** @} */ diff --git a/src/dlite-storage.c b/src/dlite-storage.c index 777f44793..5d974a4c5 100644 --- a/src/dlite-storage.c +++ b/src/dlite-storage.c @@ -261,7 +261,7 @@ int dlite_storage_delete(DLiteStorage *s, const char *id) */ char *dlite_storage_help(DLiteStorage *s) { - if (s->api->help) return s->api->help(s); + if (s->api->help) return s->api->help(s->api); return err(dliteUnsupportedError, "storage does not support help: %s", s->api->name), NULL; } diff --git a/src/utils/plugin.c b/src/utils/plugin.c index 77bf9aa65..901d96235 100644 --- a/src/utils/plugin.c +++ b/src/utils/plugin.c @@ -56,7 +56,7 @@ int plugin_decref(Plugin *plugin) /* Creates a new plugin type and returns a pointer to information about it. - `name` is the name of the new plugin type. + `kind` is the name of the new plugin kind. `symbol` is the name of the function that plugins should define `envvar` is the name of environment variable with plugin search path `state` pointer to global state passed to the plugin function. diff --git a/storages/python/dlite-plugins-python.c b/storages/python/dlite-plugins-python.c index 5487f54c3..af733036b 100644 --- a/storages/python/dlite-plugins-python.c +++ b/storages/python/dlite-plugins-python.c @@ -164,17 +164,17 @@ int flusher(DLiteStorage *s) It combines the class documentation with the documentation of the open() method. */ -char *helper(DLiteStorage *s) +char *helper(const DLiteStoragePlugin *api) { PyObject *v=NULL, *pyclassdoc=NULL, *open=NULL, *pyopendoc=NULL; - PyObject *class = (PyObject *)s->api->data; + PyObject *class = (PyObject *)api->data; const char *classname, *classdoc=NULL, *opendoc=NULL; char *doc=NULL; Py_ssize_t n=0, clen=0, olen=0, i, newlines=0; dlite_errclr(); if (!(classname = dlite_pyembed_classname(class))) - dlite_warnx("cannot get class name for storage plugin '%s'", s->api->name); + dlite_warnx("cannot get class name for storage plugin '%s'", api->name); if (PyObject_HasAttrString(class, "__doc__")) { if (!(pyclassdoc = PyObject_GetAttrString(class, "__doc__"))) diff --git a/storages/python/python-storage-plugins/blob.py b/storages/python/python-storage-plugins/blob.py index 49f9d5532..f5bbdb4bc 100644 --- a/storages/python/python-storage-plugins/blob.py +++ b/storages/python/python-storage-plugins/blob.py @@ -16,18 +16,18 @@ def open(self, uri, options=None): def load(self, id): """Loads `uuid` from current storage and return it as a new instance.""" - metaid = 'http://onto-ns.com/meta/0.1/Blob' + metaid = "http://onto-ns.com/meta/0.1/Blob" if id == metaid: # break recursive search for metadata - raise dlite.DLiteError(f'no metadata in blob storage') - with open(self.uri, 'rb') as f: + raise dlite.DLiteError(f"no metadata in blob storage") + with open(self.uri, "rb") as f: content = f.read() meta = dlite.get_instance(metaid) inst = meta(dims=[len(content)]) - inst.content = np.frombuffer(content, dtype='uint8') + inst.content = np.frombuffer(content, dtype="uint8") return inst def save(self, inst): """Stores `inst` in current storage.""" - with open(self.uri, 'wb') as f: + with open(self.uri, "wb") as f: f.write(inst.content) diff --git a/storages/python/python-storage-plugins/bson.py b/storages/python/python-storage-plugins/bson.py index 88ba031b7..129202f77 100644 --- a/storages/python/python-storage-plugins/bson.py +++ b/storages/python/python-storage-plugins/bson.py @@ -2,7 +2,7 @@ import os from typing import TYPE_CHECKING -import bson as 
pybson # Must be pymongo.bson +import bson as pybson # Must be pymongo.bson import dlite from dlite.options import Options @@ -38,7 +38,9 @@ def open(self, uri: str, options: "Optional[str]" = None) -> None: The BSON data is translated to JSON. """ self.options = Options(options, defaults="mode=a;soft7=true") - self.mode = dict(r="rb", w="wb", a="rb+", append="rb+")[self.options.mode] + self.mode = dict(r="rb", w="wb", a="rb+", append="rb+")[ + self.options.mode + ] if self.mode == "rb" and not os.path.exists(uri): raise FileNotFoundError(f"Did not find URI {uri!r}") @@ -71,7 +73,7 @@ def close(self) -> None: for uuid in self.queue(): props = self._data[uuid]["properties"] - if isinstance(props, dict): # Metadata props is list + if isinstance(props, dict): # Metadata props is list for key in props: if isinstance(props[key], (bytearray, bytes)): props[key] = props[key].hex() @@ -101,9 +103,13 @@ def save(self, inst: dlite.Instance) -> None: inst: A DLite Instance to store in the BSON storage. """ - self._data[inst.uuid] = inst.asdict(soft7=dlite.asbool(self.options.soft7)) + self._data[inst.uuid] = inst.asdict( + soft7=dlite.asbool(self.options.soft7) + ) - def queue(self, pattern: "Optional[str]" = None) -> "Generator[str, None, None]": + def queue( + self, pattern: "Optional[str]" = None + ) -> "Generator[str, None, None]": """Generator method that iterates over all UUIDs in the storage whose metadata URI matches global pattern. diff --git a/storages/python/python-storage-plugins/csv.py b/storages/python/python-storage-plugins/csv.py index 63f87db32..f81a53fab 100644 --- a/storages/python/python-storage-plugins/csv.py +++ b/storages/python/python-storage-plugins/csv.py @@ -141,8 +141,12 @@ def infer_prop_name(name: str) -> str: Inferred property name. 
""" - return name.strip(' "').rsplit("(", 1)[0].rsplit("[", 1)[0].strip().replace( - " ", "_" + return ( + name.strip(' "') + .rsplit("(", 1)[0] + .rsplit("[", 1)[0] + .strip() + .replace(" ", "_") ) diff --git a/storages/python/python-storage-plugins/mongodb.py b/storages/python/python-storage-plugins/mongodb.py index 4aac51a99..466a5e2ba 100644 --- a/storages/python/python-storage-plugins/mongodb.py +++ b/storages/python/python-storage-plugins/mongodb.py @@ -47,22 +47,18 @@ def _parse_options(self, options): defaults="database=dlite;collection=dlite_coll;mode=r;mock=no;" "user=guest;password=guest", ) - parsed_options.setdefault('password', None) + parsed_options.setdefault("password", None) return parsed_options def _configure(self, parsed_options, uri): """Configure and connect to the MongoDB database.""" - self.writable = True if 'w' in parsed_options.mode else False + self.writable = True if "w" in parsed_options.mode else False client_options = { - k: parsed_options[k] for k in parsed_options if 'MONGOCLIENT_' in k + k: parsed_options[k] for k in parsed_options if "MONGOCLIENT_" in k } - user = ( - quote_plus(parsed_options.user) - if parsed_options.user - else None - ) + user = quote_plus(parsed_options.user) if parsed_options.user else None password = ( quote_plus(parsed_options.password) if parsed_options.password @@ -70,12 +66,12 @@ def _configure(self, parsed_options, uri): ) # Determine the schema based on the presence of "localhost" or "127.0.0.1" in the URI - schema = parsed_options.get('schema', None) + schema = parsed_options.get("schema", None) if schema is None: if "localhost" in uri or "127.0.0.1" in uri: - schema = 'mongodb' + schema = "mongodb" else: - schema = 'mongodb+srv' + schema = "mongodb+srv" # Remove any existing schema from the URI if not uri.startswith(schema + "://"): @@ -87,13 +83,16 @@ def _configure(self, parsed_options, uri): if dlite.asbool(parsed_options.mock): import mongomock - mongo_url = urlparse(f'mongodb://{uri}') + + mongo_url = urlparse(f"mongodb://{uri}") port = mongo_url.port if mongo_url.port else 27017 @mongomock.patch(servers=((mongo_url.hostname, port),)) def get_client(): return open_client() + else: + def get_client(): return open_client() @@ -102,14 +101,14 @@ def open_client(): host=final_uri, username=user, password=password, - **client_options + **client_options, ) return client self.client = get_client() - self.collection = ( - self.client[parsed_options.database][parsed_options.collection] - ) + self.collection = self.client[parsed_options.database][ + parsed_options.collection + ] self.options = parsed_options def close(self): @@ -119,10 +118,10 @@ def close(self): def load(self, id): """Loads `id` from current storage and return it as a new instance.""" uuid = dlite.get_uuid(id) - document = self.collection.find_one({'uuid': uuid}) + document = self.collection.find_one({"uuid": uuid}) if not document: raise IOError( - f'No instance with {uuid=} in MongoDB database ' + f"No instance with {uuid=} in MongoDB database " f'"{self.collection.database.name}" and collection ' f'"{self.collection.name}"' ) diff --git a/storages/python/python-storage-plugins/postgresql.py b/storages/python/python-storage-plugins/postgresql.py index e03a26387..2a2d6c05c 100644 --- a/storages/python/python-storage-plugins/postgresql.py +++ b/storages/python/python-storage-plugins/postgresql.py @@ -14,36 +14,38 @@ # Translation table from dlite types to postgresql types pgtypes = { - 'blob': 'bytea', - 'bool': 'bool', - 'int': 'integer', - 'int8': 'bytea', 
- 'int16': 'smallint', - 'int32': 'integer', - 'int64': 'bigint', - 'uint16': 'integer', - 'uint32': 'bigint', - 'float': 'real', - 'double': 'float8', - 'float32': 'real', - 'float64': 'float8', - 'string': 'varchar', - 'dimension': 'varchar[2]', - 'property': 'varchar[5]', - 'relation': 'varchar[3]', + "blob": "bytea", + "bool": "bool", + "int": "integer", + "int8": "bytea", + "int16": "smallint", + "int32": "integer", + "int64": "bigint", + "uint16": "integer", + "uint32": "bigint", + "float": "real", + "double": "float8", + "float32": "real", + "float64": "float8", + "string": "varchar", + "dimension": "varchar[2]", + "property": "varchar[5]", + "relation": "varchar[3]", } + def to_pgtype(typename): """Returns PostGreSQL type corresponding to dlite typename.""" if typename in pgtypes: return pgtypes[typename] else: - t = typename.rstrip('0123456789') + t = typename.rstrip("0123456789") return pgtypes[t] class postgresql(dlite.DLiteStorageBase): """DLite storage plugin for PostgreSQL.""" + def open(self, uri, options=None): """Opens `uri`. @@ -68,18 +70,22 @@ def open(self, uri, options=None): `writable` to true if it is writable and to false otherwise. If `writable` is not set, it is assumed to be true. """ - self.options = Options(options, defaults='database=dlite;mode=append') + self.options = Options(options, defaults="database=dlite;mode=append") opts = self.options - opts.setdefault('password', None) - self.writable = False if opts.mode == 'r' else True + opts.setdefault("password", None) + self.writable = False if opts.mode == "r" else True # Connect to existing database - print(' host:', uri) - print(' user:', opts.user) - print(' database:', opts.database) - #print(' password:', opts.password) - self.conn = psycopg2.connect(host=uri, database=opts.database, - user=opts.user, password=opts.password) + print(" host:", uri) + print(" user:", opts.user) + print(" database:", opts.database) + # print(' password:', opts.password) + self.conn = psycopg2.connect( + host=uri, + database=opts.database, + user=opts.user, + password=opts.password, + ) # Open a cursor to perform database operations self.cur = self.conn.cursor() @@ -92,11 +98,12 @@ def close(self): def load(self, uuid): """Loads `uuid` from current storage and return it as a new instance.""" uuid = dlite.get_uuid(uuid) - q = sql.SQL('SELECT meta FROM uuidtable WHERE uuid = %s') + q = sql.SQL("SELECT meta FROM uuidtable WHERE uuid = %s") self.cur.execute(q, [uuid]) - metaid, = self.cur.fetchone() - q = sql.SQL('SELECT * FROM {} WHERE uuid = %s').format( - sql.Identifier(metaid)) + (metaid,) = self.cur.fetchone() + q = sql.SQL("SELECT * FROM {} WHERE uuid = %s").format( + sql.Identifier(metaid) + ) self.cur.execute(q, [uuid]) tokens = self.cur.fetchone() uuid_, uri, metaid_, dims = tokens[:4] @@ -114,13 +121,13 @@ def load(self, uuid): inst = dlite.Instance.from_metaid(metaid, dims, uri) - for i, p in enumerate(inst.meta['properties']): + for i, p in enumerate(inst.meta["properties"]): inst.set_property(p.name, values[i]) # The uuid will be wrong for data instances, so override it if not inst.is_metameta: d = inst.asdict() - d['uuid'] = uuid + d["uuid"] = uuid inst = instance_from_dict(d) return inst @@ -130,18 +137,23 @@ def save(self, inst): # Save to metadata table if not self.table_exists(inst.meta.uri): self.table_create(inst.meta, inst.dimensions.values()) - colnames = ['uuid', 'uri', 'meta', 'dims'] + [ - p.name for p in inst.meta['properties']] - q = sql.SQL('INSERT INTO {0} ({1}) VALUES ({2});').format( + colnames = 
["uuid", "uri", "meta", "dims"] + [ + p.name for p in inst.meta["properties"] + ] + q = sql.SQL("INSERT INTO {0} ({1}) VALUES ({2});").format( sql.Identifier(inst.meta.uri), - sql.SQL(', ').join(map(sql.Identifier, colnames)), - (sql.Placeholder() * len(colnames)).join(', ')) - values = [inst.uuid, - inst.uri, - inst.meta.uri, - list(inst.dimensions.values()), - ] + [dlite.standardise(v, inst.get_property_descr(k), asdict=False) - for k, v in inst.properties.items()] + sql.SQL(", ").join(map(sql.Identifier, colnames)), + (sql.Placeholder() * len(colnames)).join(", "), + ) + values = [ + inst.uuid, + inst.uri, + inst.meta.uri, + list(inst.dimensions.values()), + ] + [ + dlite.standardise(v, inst.get_property_descr(k), asdict=False) + for k, v in inst.properties.items() + ] try: self.cur.execute(q, values) except psycopg2.IntegrityError: @@ -149,49 +161,54 @@ def save(self, inst): return # Save to uuidtable - if not self.table_exists('uuidtable'): + if not self.table_exists("uuidtable"): self.uuidtable_create() - q = sql.SQL('INSERT INTO uuidtable (uuid, meta) VALUES (%s, %s);') + q = sql.SQL("INSERT INTO uuidtable (uuid, meta) VALUES (%s, %s);") self.cur.execute(q, [inst.uuid, inst.meta.uri]) self.conn.commit() def table_exists(self, table_name): """Returns true if a table named `table_name` exists.""" self.cur.execute( - 'SELECT EXISTS(SELECT * FROM information_schema.tables ' - 'WHERE table_name=%s);', (table_name, )) + "SELECT EXISTS(SELECT * FROM information_schema.tables " + "WHERE table_name=%s);", + (table_name,), + ) return self.cur.fetchone()[0] def table_create(self, meta, dims=None): """Creates a table for storing instances of `meta`.""" table_name = meta.uri if self.table_exists(table_name): - raise ValueError('Table already exists: %r' % table_name) + raise ValueError("Table already exists: %r" % table_name) if dims: dims = list(dims) cols = [ - 'uuid char(36) PRIMARY KEY', - 'uri varchar', - 'meta varchar', - 'dims integer[%d]' % meta.ndimensions + "uuid char(36) PRIMARY KEY", + "uri varchar", + "meta varchar", + "dims integer[%d]" % meta.ndimensions, ] - for p in meta['properties']: + for p in meta["properties"]: decl = f'"{p.name}" {to_pgtype(p.type)}' if len(p.dims): - decl += '[]' * len(p.dims) + decl += "[]" * len(p.dims) cols.append(decl) - q = sql.SQL('CREATE TABLE {} (%s);' % - ', '.join(cols)).format(sql.Identifier(meta.uri)) + q = sql.SQL("CREATE TABLE {} (%s);" % ", ".join(cols)).format( + sql.Identifier(meta.uri) + ) self.cur.execute(q) self.conn.commit() def uuidtable_create(self): """Creates the uuidtable - a table mapping all uuid's to their metadata uri.""" - q = sql.SQL('CREATE TABLE uuidtable (' - 'uuid char(36) PRIMARY KEY, ' - 'meta varchar' - ');') + q = sql.SQL( + "CREATE TABLE uuidtable (" + "uuid char(36) PRIMARY KEY, " + "meta varchar" + ");" + ) self.cur.execute(q) self.conn.commit() @@ -210,14 +227,14 @@ def queue(self, pattern): ere = "(?:" + match.groups()[0] if match else pcre # Use the ERE for pattern matching in the PostgreSQL query - q = sql.SQL('SELECT uuid from uuidtable WHERE uuid ~ %s;') - self.cur.execute(q, (ere, )) + q = sql.SQL("SELECT uuid from uuidtable WHERE uuid ~ %s;") + self.cur.execute(q, (ere,)) else: - q = sql.SQL('SELECT uuid from uuidtable;') + q = sql.SQL("SELECT uuid from uuidtable;") self.cur.execute(q) tokens = self.cur.fetchone() while tokens: - uuid, = tokens + (uuid,) = tokens yield uuid tokens = self.cur.fetchone() diff --git a/storages/python/python-storage-plugins/pyrdf.py 
b/storages/python/python-storage-plugins/pyrdf.py
index a4a0626f0..4de4550c4 100644
--- a/storages/python/python-storage-plugins/pyrdf.py
+++ b/storages/python/python-storage-plugins/pyrdf.py
@@ -47,7 +47,8 @@ def open(self, location: str, options: "Optional[str]" = None) -> None:
         self.location = location
         self.format = (
             self.options.format
-            if "format" in self.options else guess_format(location)
+            if "format" in self.options
+            else guess_format(location)
         )
         self.graph = rdflib.Graph()
         if self.options.mode in "ra":
@@ -82,7 +83,8 @@ def save(self, inst: dlite.Instance) -> None:
             base_uri=self.options.get("base_uri"),
             base_prefix=self.options.get("base_prefix"),
             include_meta=(
-                dlite.asbool(self.options) if "include_meta" in self.options
+                dlite.asbool(self.options.include_meta)
+                if "include_meta" in self.options
                 else None
             ),
         )
diff --git a/storages/python/python-storage-plugins/redis.py b/storages/python/python-storage-plugins/redis.py
index a2d068457..a2d48454a 100644
--- a/storages/python/python-storage-plugins/redis.py
+++ b/storages/python/python-storage-plugins/redis.py
@@ -33,7 +33,9 @@ def open(self, location: str, options=None):
         transparently encrypt all instances before sending them to Redis.
         Generate the key with `cryptography.fernet.Fernet.generate_key()`.
         """
-        opts = Options(options, defaults="port=6379;socket_keepalive=true;db=0")
+        opts = Options(
+            options, defaults="port=6379;socket_keepalive=true;db=0"
+        )
 
         # Pop out options passed to redis.set()
         self.setopts = {
@@ -76,6 +78,7 @@ def load(self, id: str):
             raise dlite.DLiteError(f"No such instance redis storage: {uuid}")
         if self.fernet_key:
             from cryptography.fernet import Fernet
+
             f = Fernet(self.fernet_key.encode())
             data = f.decrypt(data)
         return dlite.Instance.from_bson(data)
@@ -89,6 +92,7 @@ def save(self, inst: dlite.Instance):
         data = bytes(inst.asbson())
         if self.fernet_key:
             from cryptography.fernet import Fernet
+
             key = self.fernet_key.encode()
             f = Fernet(self.fernet_key)
             data = f.encrypt(data)
diff --git a/storages/python/python-storage-plugins/template.py b/storages/python/python-storage-plugins/template.py
index 4599b39b1..7e1b10402 100644
--- a/storages/python/python-storage-plugins/template.py
+++ b/storages/python/python-storage-plugins/template.py
@@ -17,6 +17,7 @@ class template(dlite.DLiteStorageBase):
           specifications.  This is the default.
         - "jinja": Use jinja. 
See https://jinja.palletsprojects.com/ """ + def open(self, location, options=None): """Opens `location`.""" self.options = Options(options, defaults="engine=format") @@ -37,8 +38,9 @@ def save(self, inst): j2_template = Template(template) data = j2_template.render(inst.properties) else: - raise ValueError("The 'engine' option must be either \"format\" " - "or \"jinja\"") + raise ValueError( + "The 'engine' option must be either \"format\" " 'or "jinja"' + ) with open(self.location, "w", encoding="utf8") as f: f.write(data) diff --git a/storages/python/python-storage-plugins/yaml.py b/storages/python/python-storage-plugins/yaml.py index 56dfabdf4..5c24c7a5a 100644 --- a/storages/python/python-storage-plugins/yaml.py +++ b/storages/python/python-storage-plugins/yaml.py @@ -14,6 +14,7 @@ class yaml(dlite.DLiteStorageBase): """DLite storage plugin for YAML.""" + _pyyaml = pyyaml # Keep a reference to pyyaml to have it during shutdown def open(self, location: str, options=None): @@ -46,7 +47,8 @@ def open(self, location: str, options=None): self._data = data self.single = ( - "properties" in self._data if self.options.single == "auto" + "properties" in self._data + if self.options.single == "auto" else dlite.asbool(self.options.single) )
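Aside from the mechanical black-style requoting, the substantive changes in this part of the diff are the storage-plugin `Help` hook now receiving the plugin API (`const DLiteStoragePlugin *api`) instead of an open storage, and the new `bindings/python/tests/test_storage_plugins.py` covering plugin loading, iteration and unloading. A minimal sketch of how these surfaces are exercised from Python, using only calls that appear in the tests above (the `inst.yaml` file is assumed to exist already, as in `test_storage.py`):

    import dlite

    # Load all storage plugins and check that the built-in json plugin
    # is among them.
    dlite.Storage.load_plugins()
    assert "json" in set(dlite.StoragePluginIter())

    # Storage.help() is forwarded to the plugin's Help hook, which after
    # this change receives the plugin API itself rather than an open
    # storage instance.
    s = dlite.Storage("yaml", "inst.yaml", options="mode=a")
    print(s.help())

    # Plugins can also be unloaded by name.
    dlite.Storage.unload_plugin("json")
    assert "json" not in set(dlite.StoragePluginIter())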