diff --git a/.github/workflows/gh-ci.yaml b/.github/workflows/gh-ci.yaml index e464321..35fdd24 100644 --- a/.github/workflows/gh-ci.yaml +++ b/.github/workflows/gh-ci.yaml @@ -49,7 +49,7 @@ jobs: - name: install package deps run: | - micromamba install numpy scipy mrcfile pytest pytest-cov codecov + micromamba install numpy scipy mrcfile pytest pytest-cov codecov openvdb - name: check install run: | diff --git a/AUTHORS b/AUTHORS index 78d26f7..922488c 100644 --- a/AUTHORS +++ b/AUTHORS @@ -29,3 +29,4 @@ Contributors: * Andrés Montoya (logo) * Rich Waldo * Pradyumn Prasad +* Shreejan Dolai diff --git a/CHANGELOG b/CHANGELOG index b3fb690..dad3361 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -12,12 +12,20 @@ The rules for this file: use tabs but use spaces for formatting * accompany each entry with github issue/PR number (Issue #xyz) +------------------------------------------------------------------------------ +01/16/2026 IAlibay, ollyfutur, conradolandia, orbeckst, PlethoraChutney, + Pradyumn-cloud, spyke7 ------------------------------------------------------------------------------- -??/??/???? orbeckst +??/??/???? orbeckst, spyke7 - * 1.1.1 + * 1.2.0 - Fixes + Enhancements + + * Added openVDB format exports (Issue #141, PR #148) + + + Fixes 01/22/2026 IAlibay, ollyfutur, conradolandia, orbeckst, PlethoraChutney, @@ -35,6 +43,7 @@ The rules for this file: Enhancements + * `Grid` now accepts binary operations with any operand that can be * `Grid` now accepts binary operations with any operand that can be broadcasted to the grid's shape according to `numpy` broadcasting rules (PR #142) diff --git a/doc/source/gridData/formats.rst b/doc/source/gridData/formats.rst index 0680ac8..a44a15d 100644 --- a/doc/source/gridData/formats.rst +++ b/doc/source/gridData/formats.rst @@ -29,6 +29,7 @@ small number of file formats is directly supported. 
:mod:`~gridData.gOpenMol` gOpenMol_ plt x :mod:`~gridData.mrc` CCP4_ ccp4,mrc x x subset implemented :class:`~gridData.core.Grid` pickle pickle x x standard Python pickle of the Grid class + :mod:`~gridData.OpenVDB` OpenVDB_ vdb x implemented for Blender visualization ============================ ========== ========= ===== ===== ========================================= @@ -39,6 +40,7 @@ small number of file formats is directly supported. .. _OpenDX: http://www.opendx.org/ .. _gOpenMol: http://www.csc.fi/gopenmol/ .. _CCP4: http://www.ccpem.ac.uk/mrc_format/mrc2014.php +.. _OpenVDB: https://www.openvdb.org/ Format-specific modules @@ -50,3 +52,4 @@ Format-specific modules formats/OpenDX formats/gOpenMol formats/mrc + formats/OpenVDB diff --git a/doc/source/gridData/formats/OpenVDB.rst b/doc/source/gridData/formats/OpenVDB.rst new file mode 100644 index 0000000..4f457ca --- /dev/null +++ b/doc/source/gridData/formats/OpenVDB.rst @@ -0,0 +1,2 @@ +.. automodule:: gridData.OpenVDB + diff --git a/gridData/OpenVDB.py b/gridData/OpenVDB.py new file mode 100644 index 0000000..f7095d7 --- /dev/null +++ b/gridData/OpenVDB.py @@ -0,0 +1,255 @@ +r""" +:mod:`~gridData.OpenVDB` --- routines to write OpenVDB files +============================================================= + +The `OpenVDB format`_ is used by Blender_ and other VFX software for +volumetric data. + +.. _`OpenVDB format`: https://www.openvdb.org +.. _Blender: https://www.blender.org/ + +This module uses the openvdb_ library to write OpenVDB files. + +.. _openvdb: https://github.com/AcademySoftwareFoundation/openvdb + +.. Note:: This module implements a simple writer for 3D regular grids, + sufficient to export density data for visualization in Blender_. + See the `Blender volume docs`_ for details on importing VDB files. + +.. 
_`Blender volume docs`: https://docs.blender.org/manual/en/latest/modeling/volumes/introduction.html + +The OpenVDB format uses a sparse tree structure to efficiently store +volumetric data. It is the native format for Blender's volume system. + + +Writing OpenVDB files +--------------------- + +If you have a :class:`~gridData.core.Grid` object, you can write it to +OpenVDB format:: + + from gridData import Grid + g = Grid("data.dx") + g.export("data.vdb") + +This will create a file that can be imported directly into Blender +(File -> Import -> OpenVDB) or (shift+A -> Volume -> Import OpenVDB). See `importing VDB in Blender`_ for details. + +.. _`importing VDB in Blender`: https://docs.blender.org/manual/en/latest/modeling/geometry_nodes/input/import/vdb.html + + +Building an OpenVDB field from a numpy array +--------------------------------------------- + +If you want to create VDB files without using the Grid class, +you can directly use the OpenVDB field API. This is useful +for custom workflows or when integrating with other libraries. + +Requires: + +grid + numpy 3D array +origin + cartesian coordinates of the center of the (0,0,0) grid cell +delta + n x n array with the length of a grid cell along each axis + +Example:: + + from gridData.OpenVDB import OpenVDBField + + field = OpenVDBField(grid, origin=origin, delta=delta) + field.write("data.vdb") + + +Classes and functions +--------------------- + +""" + +import numpy +import warnings + +try: + import openvdb as vdb + +except ImportError: + vdb = None + + +class OpenVDBField(object): + """OpenVDB field object for writing volumetric data. + + This class provides a simple interface to write 3D grid data to + OpenVDB format, which can be imported into Blender and other + VFX software. + + The field object holds grid data and metadata, and can write it + to a .vdb file. 
+ + Example + ------- + Create a field and write it:: + + import gridData.OpenVDB as OpenVDB + + vdb_field = OpenVDB.OpenVDBField('density') + vdb_field.write('output.vdb') + + Or use directly from Grid:: + + g = Grid(...) + g.export('output.vdb', format='vdb') + + """ + + def __init__( + self, + grid=None, + origin=None, + delta=None, + name="density", + tolerance=None, + metadata=None, + ): + """Initialize an OpenVDB field. + + Parameters + ---------- + grid : numpy.ndarray + 3D numpy array with the data + origin : numpy.ndarray + Coordinates of the center of grid cell [0,0,0] + delta : numpy.ndarray + Grid spacing (can be 1D array or diagonal matrix) + name : str + Name of the grid (will be visible in Blender), default 'density' + tolerance : float (optional) + Values below this tolerance are treated as background (sparse), + default None + metadata : dict (optional) + Additional metadata to embed in the VDB file. + + Raises + ------ + ImportError + If openvdb is not installed + ValueError + If grid is not 3D, or if delta is not 1D/2D or describes + non-orthorhombic cell + + """ + if vdb is None: + raise ImportError( + "openvdb is required to write VDB files. " + "Install it with: conda install -c conda-forge openvdb" + ) + self.name = name + self.tolerance = tolerance + if metadata is not None: + self.metadata = metadata + else: + self.metadata = {} + + if grid is not None: + self._populate(grid, origin, delta) + else: + self.grid = None + self.origin = None + self.delta = None + + def _populate(self, grid, origin, delta): + """Populate the field with grid data. 
+ + Parameters + ---------- + grid : numpy.ndarray + 3D numpy array with the data + origin : numpy.ndarray + Coordinates of the center of grid cell [0,0,0] + delta : numpy.ndarray + Grid spacing (can be 1D array or diagonal matrix) + + Raises + ------ + ValueError + If grid is not 3D, or if delta is not 1D/2D or describes + non-orthorhombic cell + + """ + self.grid = numpy.asarray(grid) + if self.grid.ndim != 3: + raise ValueError(f"OpenVDB only supports 3D grids, got {self.grid.ndim}D") + + self.grid = numpy.ascontiguousarray(self.grid) + + self.origin = numpy.asarray(origin) + + # Handle delta: could be 1D array or diagonal matrix + delta = numpy.asarray(delta) + if delta.ndim == 2: + if delta.shape != (3, 3): + raise ValueError("delta as a matrix must be 3x3") + + if not numpy.allclose(delta, numpy.diag(numpy.diag(delta))): + raise ValueError("Non-orthorhombic cells are not supported") + + self.delta = numpy.diag(delta) + + elif delta.ndim == 1: + if len(delta) != 3: + raise ValueError("delta must have length-3 for 3D grids") + self.delta = delta + + else: + raise ValueError( + "delta must be either a length-3 vector or a 3x3 diagonal matrix" + ) + + def write(self, filename): + """Write the field to an OpenVDB file. + + Parameters + ---------- + filename : str + Output filename (should end in .vdb) + + Notes + ----- + Limitations in OpenVDB can lead to loss of precision. If the input + data is not of type float32, it will be converted to FloatGrid which is float32. 
+ + """ + + if self.grid.dtype == numpy.bool_ or self.grid.dtype == bool: + vdb_grid = vdb.BoolGrid() + use_tolerance = False + + else: + vdb_grid = vdb.FloatGrid() + if self.tolerance == None or self.tolerance == 0: + use_tolerance = False + else: + use_tolerance = True + + vdb_grid.name = self.name + + vdb_grid.transform = vdb.createLinearTransform() + vdb_grid.transform.preScale(self.delta.tolist()) + vdb_grid.transform.postTranslate(self.origin.tolist()) + + if self.metadata: + for key, val in self.metadata.items(): + try: + vdb_grid[key] = val + except (TypeError, ValueError) as e: + warnings.warn(f"Could not set metadata '{key}': {e}", UserWarning) + + if use_tolerance: + vdb_grid.copyFromArray(self.grid, tolerance=self.tolerance) + vdb_grid.prune() + else: + vdb_grid.copyFromArray(self.grid) + vdb_grid.prune(tolerance=False) + + vdb.write(filename, grids=[vdb_grid]) diff --git a/gridData/__init__.py b/gridData/__init__.py index bfe79a1..c5aaa0e 100644 --- a/gridData/__init__.py +++ b/gridData/__init__.py @@ -110,8 +110,9 @@ from . import OpenDX from . import gOpenMol from . import mrc +from . import OpenVDB -__all__ = ['Grid', 'OpenDX', 'gOpenMol', 'mrc'] +__all__ = ['Grid', 'OpenDX', 'gOpenMol', 'mrc', 'OpenVDB'] from importlib.metadata import version __version__ = version("GridDataFormats") diff --git a/gridData/core.py b/gridData/core.py index 035d769..42882f7 100644 --- a/gridData/core.py +++ b/gridData/core.py @@ -45,6 +45,7 @@ from . import OpenDX from . import gOpenMol from . import mrc +from . import OpenVDB def _grid(x): @@ -211,27 +212,36 @@ class Grid(object): """ #: Default format for exporting with :meth:`export`. 
- default_format = 'DX' - - def __init__(self, grid=None, edges=None, origin=None, delta=None, - metadata=None, interpolation_spline_order=3, - file_format=None, assume_volumetric=False): + default_format = "DX" + + def __init__( + self, + grid=None, + edges=None, + origin=None, + delta=None, + metadata=None, + interpolation_spline_order=3, + file_format=None, + assume_volumetric=False, + ): # file formats are guessed from extension == lower case key self._exporters = { - 'DX': self._export_dx, - 'PKL': self._export_python, - 'PICKLE': self._export_python, # compatibility - 'PYTHON': self._export_python, # compatibility - 'MRC': self._export_mrc, + "DX": self._export_dx, + "PKL": self._export_python, + "PICKLE": self._export_python, # compatibility + "PYTHON": self._export_python, # compatibility + "VDB": self._export_vdb, + "MRC": self._export_mrc, } self._loaders = { - 'CCP4': self._load_mrc, - 'MRC': self._load_mrc, - 'DX': self._load_dx, - 'PLT': self._load_plt, - 'PKL': self._load_python, - 'PICKLE': self._load_python, # compatibility - 'PYTHON': self._load_python, # compatibility + "CCP4": self._load_mrc, + "MRC": self._load_mrc, + "DX": self._load_dx, + "PLT": self._load_plt, + "PKL": self._load_python, + "PICKLE": self._load_python, # compatibility + "PYTHON": self._load_python, # compatibility } self.metadata = metadata if metadata is not None else {} @@ -248,7 +258,7 @@ def __init__(self, grid=None, edges=None, origin=None, delta=None, # Can we read this as a file? 
# Use str(x) to work with py.path.LocalPath and pathlib.Path instances # even for Python < 3.6 - with open(str(grid), 'rb'): + with open(str(grid), "rb"): pass except (OSError, IOError): # no, this is probably an array-like thingy @@ -258,7 +268,11 @@ def __init__(self, grid=None, edges=None, origin=None, delta=None, filename = str(grid) if filename is not None: - self.load(filename, file_format=file_format, assume_volumetric=assume_volumetric) + self.load( + filename, + file_format=file_format, + assume_volumetric=assume_volumetric, + ) else: self._load(grid, edges, metadata, origin, delta) @@ -373,21 +387,32 @@ def resample_factor(self, factor): raise ValueError("Factor must be positive") # Determine current spacing spacing = (numpy.array(self._max_edges()) - numpy.array(self._min_edges())) / ( - -1 + numpy.array(self._len_edges())) + -1 + numpy.array(self._len_edges()) + ) # First guess at the new spacing is inversely related to the # magnification factor. newspacing = spacing / float(factor) smidpoints = numpy.array(self._midpoints()) # We require that the new spacing result in an even subdivision of the # existing midpoints - newspacing = (smidpoints[:, -1] - smidpoints[:, 0]) / (numpy.maximum( - 1, numpy.floor((smidpoints[:, -1] - smidpoints[:, 0]) / newspacing))) + newspacing = (smidpoints[:, -1] - smidpoints[:, 0]) / ( + numpy.maximum( + 1, numpy.floor((smidpoints[:, -1] - smidpoints[:, 0]) / newspacing) + ) + ) # How many edge points should there be? 
It is the number of intervals # between midpoints + 2 - edgelength = 2 + \ - numpy.round((smidpoints[:, -1] - smidpoints[:, 0]) / newspacing) - edges = [numpy.linspace(start, stop, num=int(N), endpoint=True) for (start, stop, N) in zip( - smidpoints[:, 0] - 0.5 * newspacing, smidpoints[:, -1] + 0.5 * newspacing, edgelength)] + edgelength = 2 + numpy.round( + (smidpoints[:, -1] - smidpoints[:, 0]) / newspacing + ) + edges = [ + numpy.linspace(start, stop, num=int(N), endpoint=True) + for (start, stop, N) in zip( + smidpoints[:, 0] - 0.5 * newspacing, + smidpoints[:, -1] + 0.5 * newspacing, + edgelength, + ) + ] return self.resample(edges) def _update(self): @@ -404,8 +429,9 @@ def _update(self): spline interpolation function that can generated a value for coordinate """ - self.delta = numpy.array(list( - map(lambda e: (e[-1] - e[0]) / (len(e) - 1), self.edges))) + self.delta = numpy.array( + list(map(lambda e: (e[-1] - e[0]) / (len(e) - 1), self.edges)) + ) self.midpoints = self._midpoints(self.edges) self.origin = numpy.array(list(map(lambda m: m[0], self.midpoints))) if self.__interpolated is not None: @@ -488,7 +514,7 @@ def _guess_format(self, filename, file_format=None, export=True): available = self._loaders if file_format is None: splitted = os.path.splitext(filename) - if splitted[1][1:] in ('gz', ): + if splitted[1][1:] in ("gz",): file_format = os.path.splitext(splitted[0])[1][1:] else: file_format = splitted[1][1:] @@ -498,26 +524,22 @@ def _guess_format(self, filename, file_format=None, export=True): if file_format not in available: raise ValueError( "File format {} not available, choose one of {}".format( - file_format, available.keys())) + file_format, available.keys() + ) + ) return file_format def _get_exporter(self, filename, file_format=None): - return self._exporters[self._guess_format(filename, - file_format=file_format, - export=True)] + return self._exporters[ + self._guess_format(filename, file_format=file_format, export=True) + ] def 
_get_loader(self, filename, file_format=None): - return self._loaders[self._guess_format(filename, - file_format=file_format, - export=False)] - - def _load( - self, - grid=None, - edges=None, - metadata=None, - origin=None, - delta=None): + return self._loaders[ + self._guess_format(filename, file_format=file_format, export=False) + ] + + def _load(self, grid=None, edges=None, metadata=None, origin=None, delta=None): if edges is not None: # set up from histogramdd-type data self.grid = numpy.asanyarray(grid) @@ -529,19 +551,21 @@ def _load( delta = numpy.asanyarray(delta) if len(origin) != grid.ndim: raise TypeError( - "Dimension of origin is not the same as grid dimension.") + "Dimension of origin is not the same as grid dimension." + ) if delta.shape == () and numpy.isreal(delta): delta = numpy.ones(grid.ndim) * delta elif delta.ndim > 1: - raise NotImplementedError( - "Non-rectangular grids are not supported.") + raise NotImplementedError("Non-rectangular grids are not supported.") elif len(delta) != grid.ndim: - raise TypeError("delta should be scalar or array-like of" - "len(grid.ndim)") + raise TypeError( + "delta should be scalar or array-like of" "len(grid.ndim)" + ) # note that origin is CENTER so edges must be shifted by -0.5*delta - self.edges = [origin[dim] + - (numpy.arange(m + 1) - 0.5) * delta[dim] - for dim, m in enumerate(grid.shape)] + self.edges = [ + origin[dim] + (numpy.arange(m + 1) - 0.5) * delta[dim] + for dim, m in enumerate(grid.shape) + ] self.grid = numpy.asanyarray(grid) self._update() else: @@ -550,7 +574,9 @@ def _load( "Grid(grid=, edges=) or " "Grid(grid=, origin=(x0, y0, z0), delta=(dx, dy, dz)):\n" "grid={0} edges={1} origin={2} delta={3}".format( - grid, edges, origin, delta)) + grid, edges, origin, delta + ) + ) # NOTE: keep loader kwargs in sync between load() and __init__() def load(self, filename, file_format=None, assume_volumetric=False): @@ -569,11 +595,9 @@ def load(self, filename, file_format=None, 
assume_volumetric=False): loader(filename, assume_volumetric=assume_volumetric) def _load_python(self, filename, **kwargs): - with open(filename, 'rb') as f: + with open(filename, "rb") as f: saved = pickle.load(f) - self._load(grid=saved['grid'], - edges=saved['edges'], - metadata=saved['metadata']) + self._load(grid=saved["grid"], edges=saved["edges"], metadata=saved["metadata"]) def _load_mrc(self, filename, assume_volumetric=False, **kwargs): """Initializes Grid from a MRC/CCP4 file.""" @@ -598,7 +622,9 @@ def _load_plt(self, filename, **kwargs): grid, edges = g.histogramdd() self._load(grid=grid, edges=edges, metadata=self.metadata) - def export(self, filename, file_format=None, type=None, typequote='"'): + def export( + self, filename, file_format=None, type=None, typequote='"', tolerance=None + ): """export density to file using the given format. The format can also be deduced from the suffix of the filename @@ -616,6 +642,8 @@ def export(self, filename, file_format=None, type=None, typequote='"'): pickle pickle (use :meth:`Grid.load` to restore); :meth:`Grid.save` is simpler than ``export(format='python')``. + vdb + :mod:`OpenVDB` Parameters ---------- @@ -641,10 +669,14 @@ def export(self, filename, file_format=None, type=None, typequote='"'): .. versionadded:: 0.5.0 + tolerance : float (optional) + For VDB, values below this tolerance are treated as background (sparse), + default None + """ filename = str(filename) exporter = self._get_exporter(filename, file_format=file_format) - exporter(filename, type=type, typequote=typequote) + exporter(filename, type=type, typequote=typequote, tolerance=tolerance) # note: the _export_FORMAT() methods all take the filename as a mandatory # argument. They can process kwargs but they are not required to do @@ -657,7 +689,7 @@ def _export_python(self, filename, **kwargs): is sufficient to recreate the grid object with ``__init__()``. 
""" data = dict(grid=self.grid, edges=self.edges, metadata=self.metadata) - with open(filename, 'wb') as f: + with open(filename, "wb") as f: pickle.dump(data, f, pickle.HIGHEST_PROTOCOL) def _export_dx(self, filename, type=None, typequote='"', **kwargs): @@ -672,53 +704,77 @@ def _export_dx(self, filename, type=None, typequote='"', **kwargs): """ root, ext = os.path.splitext(filename) - filename = root + '.dx' + filename = root + ".dx" comments = [ - 'OpenDX density file written by gridDataFormats.Grid.export()', - 'File format: http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#HDREDF', - 'Data are embedded in the header and tied to the grid positions.', - 'Data is written in C array order: In grid[x,y,z] the axis z is fastest', - 'varying, then y, then finally x, i.e. z is the innermost loop.'] + "OpenDX density file written by gridDataFormats.Grid.export()", + "File format: http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#HDREDF", + "Data are embedded in the header and tied to the grid positions.", + "Data is written in C array order: In grid[x,y,z] the axis z is fastest", + "varying, then y, then finally x, i.e. 
z is the innermost loop.", + ] # write metadata in comments section if self.metadata: - comments.append('Meta data stored with the python Grid object:') + comments.append("Meta data stored with the python Grid object:") for k in self.metadata: - comments.append(' ' + str(k) + ' = ' + str(self.metadata[k])) - comments.append( - '(Note: the VMD dx-reader chokes on comments below this line)') + comments.append(" " + str(k) + " = " + str(self.metadata[k])) + comments.append("(Note: the VMD dx-reader chokes on comments below this line)") components = dict( - positions=OpenDX.gridpositions(1, self.grid.shape, self.origin, - self.delta), + positions=OpenDX.gridpositions(1, self.grid.shape, self.origin, self.delta), connections=OpenDX.gridconnections(2, self.grid.shape), data=OpenDX.array(3, self.grid, type=type, typequote=typequote), ) - dx = OpenDX.field('density', components=components, comments=comments) - if ext == '.gz': + dx = OpenDX.field("density", components=components, comments=comments) + if ext == ".gz": filename = root + ext dx.write(filename) - + + def _export_vdb(self, filename, tolerance=None, **kwargs): + """Export the density grid to an OpenVDB file. + + The file format is compatible with Blender's volume system. + Only 3D grids are supported. + + For the file format see https://www.openvdb.org + """ + if self.grid.ndim != 3: + raise ValueError( + f"OpenVDB export requires a 3D grid, got {self.grid.ndim}D" + ) + + grid_name = self.metadata.get("name", "density") + + vdb_field = OpenVDB.OpenVDBField( + grid=self.grid, + origin=self.origin, + delta=self.delta, + name=grid_name, + tolerance=tolerance, + metadata=self.metadata, + ) + vdb_field.write(filename) + def _export_mrc(self, filename, **kwargs): """Export the density grid to an MRC/CCP4 file. - + The MRC2014 file format is used via the mrcfile library. 
- + Parameters ---------- filename : str Output filename **kwargs Additional keyword arguments (currently ignored) - + Notes ----- * Only orthorhombic unit cells are supported * If the Grid was loaded from an MRC file, the original header information (including axis ordering) is preserved * For new grids, standard ordering (mapc=1, mapr=2, maps=3) is used - + .. versionadded:: 1.1.0 """ # Create MRC object and populate with Grid data @@ -727,11 +783,11 @@ def _export_mrc(self, filename, **kwargs): mrc_file.delta = numpy.diag(self.delta) mrc_file.origin = self.origin mrc_file.rank = 3 - + # Transfer header if it exists (preserves axis ordering and other metadata) - if hasattr(self, '_mrc_header'): + if hasattr(self, "_mrc_header"): mrc_file.header = self._mrc_header - + # Write to file mrc_file.write(filename) @@ -797,7 +853,9 @@ def check_compatible(self, other): ) else: try: - is_compatible = numpy.broadcast(self.grid, other).shape == self.grid.shape + is_compatible = ( + numpy.broadcast(self.grid, other).shape == self.grid.shape + ) except ValueError: is_compatible = False if not is_compatible: @@ -805,7 +863,8 @@ def check_compatible(self, other): "The argument cannot be arithmetically combined with the grid. " "It must be broadcastable to the grid's shape or a `Grid` with identical edges. " "Use `Grid.resample(other.edges)` to make a new grid that is " - "compatible with `other`.") + "compatible with `other`." 
+ ) return True def _interpolationFunctionFactory(self, spline_order=None, cval=None): @@ -859,26 +918,28 @@ def interpolatedF(*coordinates): >>> FF = _interpolationFunction(XX,YY,ZZ) """ _coordinates = numpy.array( - [_transform(coordinates[i], x0[i], dx[i]) for i in range(len( - coordinates))]) - return scipy.ndimage.map_coordinates(coeffs, - _coordinates, - prefilter=False, - mode='constant', - cval=cval) + [ + _transform(coordinates[i], x0[i], dx[i]) + for i in range(len(coordinates)) + ] + ) + return scipy.ndimage.map_coordinates( + coeffs, _coordinates, prefilter=False, mode="constant", cval=cval + ) + return interpolatedF def __eq__(self, other): if not isinstance(other, Grid): return False - return numpy.all( - other.grid == self.grid) and numpy.all( - other.origin == self.origin) and numpy.all( - numpy.all( - other_edge == self_edge) for other_edge, - self_edge in zip( - other.edges, - self.edges)) + return ( + numpy.all(other.grid == self.grid) + and numpy.all(other.origin == self.origin) + and numpy.all( + numpy.all(other_edge == self_edge) + for other_edge, self_edge in zip(other.edges, self.edges) + ) + ) def __ne__(self, other): return not self.__eq__(other) @@ -905,11 +966,7 @@ def __floordiv__(self, other): def __pow__(self, other): self.check_compatible(other) - return self.__class__( - numpy.power( - self.grid, - _grid(other)), - edges=self.edges) + return self.__class__(numpy.power(self.grid, _grid(other)), edges=self.edges) def __radd__(self, other): self.check_compatible(other) @@ -933,18 +990,14 @@ def __rfloordiv__(self, other): def __rpow__(self, other): self.check_compatible(other) - return self.__class__( - numpy.power( - _grid(other), - self.grid), - edges=self.edges) + return self.__class__(numpy.power(_grid(other), self.grid), edges=self.edges) def __repr__(self): try: bins = self.grid.shape except AttributeError: bins = "no" - return '<{0} with {1!r} bins>'.format(self.__class__, bins) + return "<{0} with {1!r} 
bins>".format(self.__class__, bins) def ndmeshgrid(*arrs): diff --git a/gridData/tests/test_vdb.py b/gridData/tests/test_vdb.py new file mode 100644 index 0000000..ff2dc7f --- /dev/null +++ b/gridData/tests/test_vdb.py @@ -0,0 +1,284 @@ +import numpy as np +from numpy.testing import assert_allclose + +import pytest +from unittest.mock import patch + +import gridData.OpenVDB +from gridData import Grid + +from . import datafiles + +try: + import openvdb as vdb + + HAS_OPENVDB = True +except ImportError: + HAS_OPENVDB = False + + +@pytest.fixture +def grid345(): + data = np.arange(1, 61, dtype=np.float32).reshape((3, 4, 5)) + g = Grid(data.copy(), origin=np.zeros(3), delta=np.ones(3)) + return data, g + + +@pytest.mark.skipif(not HAS_OPENVDB, reason="openvdb not installed") +class TestVDBWrite: + def test_write_vdb_from_grid(self, tmpdir, grid345): + data, g = grid345 + + outfile = str(tmpdir / "test.vdb") + g.export(outfile, file_format="VDB") + + assert tmpdir.join("test.vdb").exists() + + grids, metadata = vdb.readAll(outfile) + assert len(grids) == 1 + assert grids[0].name == "density" + + grid_vdb = grids[0] + acc = grid_vdb.getAccessor() + + assert grid_vdb.evalActiveVoxelDim() == data.shape + + corners = [ + (0, 0, 0), + (data.shape[0] - 1, data.shape[1] - 1, data.shape[2] - 1), + (1, 2, 3), + ] + for i, j, k in corners: + got = acc.getValue((i, j, k)) + assert got == pytest.approx(float(data[i, j, k])) + + def test_write_vdb_default_grid_name(self, tmpdir, grid345): + data, g = grid345 + g.metadata = {} + + outfile = str(tmpdir / "default_name.vdb") + g.export(outfile) + grids, metadata = vdb.readAll(outfile) + assert grids[0].name == "density" + + def test_write_vdb_autodetect_extension(self, tmpdir, grid345): + data, g = grid345 + + outfile = str(tmpdir / "auto.vdb") + g.export(outfile) + + assert tmpdir.join("auto.vdb").exists() + + def test_write_vdb_with_metadata(self, tmpdir, grid345): + data, g = grid345 + g.metadata["name"] = "test_density" + + outfile 
= str(tmpdir / "metadata.vdb") + g.export(outfile) + + grids, metadata = vdb.readAll(outfile) + assert grids[0].name == "test_density" + + def test_write_vdb_origin_and_spacing(self, tmpdir): + data = np.ones((4, 4, 4), dtype=np.float32) + origin = np.array([10.0, 20.0, 30.0]) + delta = np.array([0.5, 0.5, 0.5]) + + g = Grid(data, origin=origin, delta=delta) + outfile = str(tmpdir / "transform.vdb") + g.export(outfile) + + grids, metadata = vdb.readAll(outfile) + grid_vdb = grids[0] + + voxel_size = grid_vdb.transform.voxelSize() + + spacing = [voxel_size[0], voxel_size[1], voxel_size[2]] + assert_allclose(spacing, delta, rtol=1e-5) + + def test_write_vdb_from_ccp4(self, tmpdir): + g = Grid(datafiles.CCP4) + outfile = str(tmpdir / "from_ccp4.vdb") + + g.export(outfile, file_format="VDB") + + assert tmpdir.join("from_ccp4.vdb").exists() + grids, metadata = vdb.readAll(outfile) + assert len(grids) == 1 + + def test_vdb_field_direct(self, tmpdir): + data = np.arange(27).reshape(3, 3, 3).astype(np.float32) + + vdb_field = gridData.OpenVDB.OpenVDBField( + data, origin=[0, 0, 0], delta=[1, 1, 1], name="direct_test" + ) + + outfile = str(tmpdir / "direct.vdb") + vdb_field.write(outfile) + + grids, metadata = vdb.readAll(outfile) + assert grids[0].name == "direct_test" + + grid_vdb = grids[0] + assert grid_vdb.evalActiveVoxelDim() == data.shape + acc = grid_vdb.getAccessor() + + assert acc.getValue((0, 0, 0)) == pytest.approx(float(data[0, 0, 0])) + assert acc.getValue((1, 1, 1)) == pytest.approx(float(data[1, 1, 1])) + assert acc.getValue((2, 2, 2)) == pytest.approx(float(data[2, 2, 2])) + + def test_vdb_field_2d_raises(self): + data_2d = np.arange(12).reshape(3, 4) + + with pytest.raises(ValueError, match="3D grids"): + gridData.OpenVDB.OpenVDBField(data_2d, origin=[0, 0], delta=[1, 1]) + + def test_write_vdb_nonuniform_spacing(self, tmpdir): + data = np.ones((3, 3, 3), dtype=np.float32) + delta = np.array([0.5, 1.0, 1.5]) + g = Grid(data, origin=[0, 0, 0], delta=delta) 
+ + outfile = str(tmpdir / "nonuniform.vdb") + g.export(outfile) + assert tmpdir.join("nonuniform.vdb").exists() + + grids, metadata = vdb.readAll(outfile) + grid_vdb = grids[0] + voxel_size = grid_vdb.transform.voxelSize() + + spacing = [voxel_size[0], voxel_size[1], voxel_size[2]] + + assert_allclose(spacing, delta, rtol=1e-5) + + def test_write_vdb_with_delta_matrix(self, tmpdir): + data = np.ones((3, 3, 3), dtype=np.float32) + delta = np.diag([1.0, 2.0, 3.0]) + + vdb_field = gridData.OpenVDB.OpenVDBField( + data, origin=[0, 0, 0], delta=delta, name="matrix_delta" + ) + + outfile = str(tmpdir / "matrix_delta.vdb") + vdb_field.write(outfile) + assert tmpdir.join("matrix_delta.vdb").exists() + + grids, metadata = vdb.readAll(outfile) + grid_vdb = grids[0] + voxel_size = grid_vdb.transform.voxelSize() + + spacing = [voxel_size[0], voxel_size[1], voxel_size[2]] + + assert_allclose(spacing, np.diag(delta), rtol=1e-5) + + def test_write_vdb_sparse_data(self, tmpdir): + data = np.zeros((10, 10, 10), dtype=np.float32) + data[2, 3, 4] = 5.0 + data[7, 8, 9] = 10.0 + + g = Grid(data, origin=[0, 0, 0], delta=[1, 1, 1]) + outfile = str(tmpdir / "sparse.vdb") + g.export(outfile) + + assert tmpdir.join("sparse.vdb").exists() + grids, metadata = vdb.readAll(outfile) + assert len(grids) == 1 + + grid_vdb = grids[0] + acc = grid_vdb.getAccessor() + assert acc.getValue((2, 3, 4)) == pytest.approx(data[2, 3, 4]) + assert acc.getValue((7, 8, 9)) == pytest.approx(data[7, 8, 9]) + + def test_write_vdb_with_zero_tolerance(self, tmpdir): + data = np.ones((3, 3, 3), dtype=np.float32) + g = Grid(data, origin=[0, 0, 0], delta=[1, 1, 1]) + outfile = str(tmpdir / "zero_tolerance.vdb") + + g.export(outfile, tolerance=0) + + assert tmpdir.join("zero_tolerance.vdb").exists() + + def test_vdb_non_orthrhombic_raises(self): + data = np.ones((3, 3, 3), dtype=np.float32) + delta = np.array( + [ + [1, 0.1, 0], + [0, 1, 0], + [0, 0, 1], + ] + ) + + with pytest.raises(ValueError, 
match="Non-orthorhombic"): + gridData.OpenVDB.OpenVDBField(data, origin=[0, 0, 0], delta=delta) + + def test_delta_matrix_wrong_shape_raises(self): + data = np.ones((3, 3, 3), dtype=np.float32) + origin = [0.0, 0.0, 0.0] + bad_delta = np.eye(2) + + with pytest.raises(ValueError, match="delta as a matrix must be 3x3"): + gridData.OpenVDB.OpenVDBField(data, origin, bad_delta) + + def test_delta_scalar_raises(self): + data = np.ones((3, 3, 3), dtype=np.float32) + origin = [0.0, 0.0, 0.0] + bad_delta = np.array(1.0) + + with pytest.raises( + ValueError, + match="delta must be either a length-3 vector or a 3x3 diagonal matrix", + ): + gridData.OpenVDB.OpenVDBField(data, origin, bad_delta) + + def test_delta_1d_wrong_length_raises(self): + data = np.ones((3, 3, 3), dtype=np.float32) + origin = [0.0, 0.0, 0.0] + bad_delta = np.array([1.0, 2.0]) + + with pytest.raises(ValueError, match="must have length-3"): + gridData.OpenVDB.OpenVDBField(data, origin, bad_delta) + + def test_write_vdb_boolean_grid(self, tmpdir): + data = np.random.random((10, 10, 10)) > 0.5 + + g = Grid(data, origin=[0, 0, 0], delta=[1, 1, 1]) + g.metadata["name"] = "boolean_mask" + + outfile = str(tmpdir / "boolean.vdb") + g.export(outfile) + assert tmpdir.join("boolean.vdb").exists() + + grids, _ = vdb.readAll(outfile) + assert len(grids) == 1 + + grid_vdb = grids[0] + + assert isinstance(grid_vdb, vdb.BoolGrid) + + acc = grid_vdb.getAccessor() + assert acc.getValue((0, 0, 0)) == bool(data[0, 0, 0]) + assert acc.getValue((5, 5, 5)) == bool(data[5, 5, 5]) + + def test_write_vdb_with_tolerance_parameter(self, tmpdir, grid345): + data, g = grid345 + outfile = str(tmpdir / "with_tolerance.vdb") + g.export(outfile, tolerance=0.1) + + assert tmpdir.join("with_tolerance.vdb").exists() + + grids, _ = vdb.readAll(outfile) + grid_vdb = grids[0] + + acc = grid_vdb.getAccessor() + assert acc.getValue((0, 0, 0)) == pytest.approx(float(data[0, 0, 0])) + assert acc.getValue((1, 2, 3)) == pytest.approx(float(data[1, 
2, 3])) + + +@pytest.mark.skipif( + not HAS_OPENVDB, reason="Need openvdb to test import error handling" +) +def test_vdb_import_error(): + with patch("gridData.OpenVDB.vdb", None): + with pytest.raises(ImportError, match="openvdb is required"): + gridData.OpenVDB.OpenVDBField( + np.ones((3, 3, 3)), origin=[0, 0, 0], delta=[1, 1, 1] + )