Skip to content

Commit

Permalink
Make submodules private (#28)
Browse files · Browse the repository at this point in the history
Prefix submodule names with an underscore to indicate that they're not
intended to be accessed by downstream projects.

Only symbols exposed in the top-level namespace should be considered
public. I don't want to treat the locations of symbols in individual
submodules as part of the API -- I'd prefer to be able to reorganize
things in the future without causing breaking changes.

In the future, we might add public submodules (such as a `tophu.typing`
submodule for custom type annotations) to store public symbols that
don't belong in the top-level namespace. But, for now, every symbol
that's intended to be public should be importable directly from `tophu`.
(Loading branch information…)
gmgunter authored Aug 24, 2023
1 parent 484407c commit 1cb3a47
Show file tree
Hide file tree
Showing 9 changed files with 34 additions and 34 deletions.
14 changes: 7 additions & 7 deletions src/tophu/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
from .filter import *
from .io import *
from .multilook import *
from .multiscale import *
from .unwrap import *
from .upsample import *
from .util import *
from ._filter import *
from ._io import *
from ._multilook import *
from ._multiscale import *
from ._unwrap import *
from ._upsample import *
from ._util import *

__version__ = "0.1.0"
File renamed without changes.
6 changes: 3 additions & 3 deletions src/tophu/io.py → src/tophu/_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import rasterio
from numpy.typing import ArrayLike, DTypeLike

from . import util
from ._util import as_tuple_of_int

__all__ = [
"DatasetReader",
Expand Down Expand Up @@ -154,7 +154,7 @@ def __init__(
object.
"""
filepath = Path(filepath)
shape = util.as_tuple_of_int(shape)
shape = as_tuple_of_int(shape)
dtype = np.dtype(dtype)

# Get array size in bytes.
Expand Down Expand Up @@ -298,7 +298,7 @@ def __init__(
# Create the HDF5 file and dataset if they don't already exist.
# If the dataset already exists, make sure its shape & dtype are as
# specified.
shape = util.as_tuple_of_int(shape)
shape = as_tuple_of_int(shape)
dtype = np.dtype(dtype)
with h5py.File(filepath, "a") as f:
dataset = f.require_dataset(
Expand Down
4 changes: 2 additions & 2 deletions src/tophu/multilook.py → src/tophu/_multilook.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import dask.array as da
import numpy as np

from . import util
from ._util import iseven

__all__ = [
"multilook",
Expand Down Expand Up @@ -64,7 +64,7 @@ def multilook(arr: da.Array, nlooks: int | Iterable[int]) -> da.Array:
raise ValueError("number of looks should not exceed array shape")

# Warn if the number of looks along any axis is even-valued.
if any(map(util.iseven, nlooks)):
if any(map(iseven, nlooks)):
warnings.warn(
"one or more components of nlooks is even-valued -- this will result in"
" a phase delay in the multilooked data equivalent to a half-bin shift",
Expand Down
34 changes: 17 additions & 17 deletions src/tophu/multiscale.py → src/tophu/_multiscale.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,12 @@
import scipy.signal
from numpy.typing import ArrayLike, NDArray

from . import util
from .filter import bandpass_equiripple_filter
from .io import DatasetReader, DatasetWriter
from .multilook import multilook
from .unwrap import UnwrapCallback
from .upsample import upsample_nearest
from . import _util
from ._filter import bandpass_equiripple_filter
from ._io import DatasetReader, DatasetWriter
from ._multilook import multilook
from ._unwrap import UnwrapCallback
from ._upsample import upsample_nearest

__all__ = [
"multiscale_unwrap",
Expand Down Expand Up @@ -325,7 +325,7 @@ def to_single_chunk(arr: ArrayLike) -> da.Array:
coherence_lores_singleblock = to_single_chunk(coherence_lores)

# Unwrap the downsampled data.
unwrapped_lores, conncomp_lores = util.map_blocks(
unwrapped_lores, conncomp_lores = _util.map_blocks(
unwrap_func,
igram_lores_singleblock,
coherence_lores_singleblock,
Expand Down Expand Up @@ -512,7 +512,7 @@ def _multiscale_unwrap(
# downsampling was requested. This case is functionally equivalent to just making a
# single call to `unwrap_func()`.
if (igram.numblocks == 1) and (downsample_factor == (1, 1)):
return util.map_blocks( # type: ignore[return-value]
return _util.map_blocks( # type: ignore[return-value]
unwrap_func,
igram,
coherence,
Expand All @@ -536,7 +536,7 @@ def _multiscale_unwrap(
)

# Unwrap each tile independently.
unwrapped, conncomp = util.map_blocks(
unwrapped, conncomp = _util.map_blocks(
unwrap_func,
igram,
coherence,
Expand Down Expand Up @@ -585,8 +585,8 @@ def get_tile_dims(
Shape of a typical tile. The last tile along each axis may be smaller.
"""
# Normalize `shape` and `ntiles` into tuples of ints.
shape = util.as_tuple_of_int(shape)
ntiles = util.as_tuple_of_int(ntiles)
shape = _util.as_tuple_of_int(shape)
ntiles = _util.as_tuple_of_int(ntiles)

# Number of dimensions of the partitioned array.
ndim = len(shape)
Expand All @@ -598,18 +598,18 @@ def get_tile_dims(
if any(map(lambda n: n < 1, ntiles)):
raise ValueError("number of tiles must be >= 1")

tiledims = util.ceil_divide(shape, ntiles)
tiledims = _util.ceil_divide(shape, ntiles)

if snap_to is not None:
# Normalize `snap_to` to a tuple of ints.
snap_to = util.as_tuple_of_int(snap_to)
snap_to = _util.as_tuple_of_int(snap_to)

if len(snap_to) != ndim:
if len(snap_to) != ndim: # type: ignore[arg-type]
raise ValueError("size mismatch: shape and snap_to must have same length")
if any(map(lambda s: s < 1, snap_to)):
if any(map(lambda s: s < 1, snap_to)): # type: ignore[arg-type]
raise ValueError("snap_to lengths must be >= 1")

tiledims = util.round_up_to_next_multiple(tiledims, snap_to)
tiledims = _util.round_up_to_next_multiple(tiledims, snap_to)

# Tile dimensions should not exceed the full array dimensions.
tiledims = tuple(np.minimum(tiledims, shape))
Expand Down Expand Up @@ -725,4 +725,4 @@ def multiscale_unwrap(
)

# Store results.
da.store([da_unwrapped, da_conncomp], [unwrapped, conncomp], lock=util.get_lock())
da.store([da_unwrapped, da_conncomp], [unwrapped, conncomp], lock=_util.get_lock())
File renamed without changes.
6 changes: 3 additions & 3 deletions src/tophu/upsample.py → src/tophu/_upsample.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import scipy.fft
from numpy.typing import ArrayLike, NDArray

from . import util
from ._util import iseven

__all__ = [
"upsample_fft",
Expand Down Expand Up @@ -153,13 +153,13 @@ def negfreqbins(n: int) -> slice:
# Nyquist bin in the padded array.
for axis in axes:
n = data.shape[axis]
if util.iseven(n):
if iseven(n):
s = [slice(None)] * data.ndim
s[axis] = n // 2
Y[tuple(s)] *= 0.5
for axis in axes:
n = data.shape[axis]
if util.iseven(n):
if iseven(n):
s1 = [slice(None)] * data.ndim
s1[axis] = n // 2
s2 = [slice(None)] * data.ndim
Expand Down
File renamed without changes.
4 changes: 2 additions & 2 deletions test/test_multiscale.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
from numpy.typing import ArrayLike, NDArray

import tophu
from tophu.multiscale import get_tile_dims
from tophu.unwrap import UnwrapCallback
from tophu import UnwrapCallback
from tophu._multiscale import get_tile_dims

from .simulate import simulate_phase_noise, simulate_terrain

Expand Down

0 comments on commit 1cb3a47

Please sign in to comment.