
Commit

changed siz -> size
Matteo NERI authored and committed on Jul 9, 2024
1 parent 7f81fc2 commit d60102b
Showing 2 changed files with 6 additions and 6 deletions.
hoi/core/entropies.py (8 changes: 4 additions & 4 deletions)
@@ -251,7 +251,7 @@ def entropy_gauss(x: jnp.array) -> jnp.array:

 @partial(jax.jit, static_argnums=(1,))
 def entropy_bin(
-    x: jnp.array, base: int = 2, bin_siz: float = None
+    x: jnp.array, base: int = 2, bin_size: float = None
 ) -> jnp.array:
     """Entropy using binning.
@@ -262,7 +262,7 @@
         be discretize
     base : int | 2
         The logarithmic base to use. Default is base 2.
-    bin_siz : float | None
+    bin_size : float | None
         The size of all the bins. Will be taken in consideration only if all
         bins have the same size, for histogram estimator.
@@ -283,8 +283,8 @@
     )[1]
     probs = counts / n_samples

-    if bin_siz is not None:
-        bins = jnp.where(probs != 0, bin_siz, 0)
+    if bin_size is not None:
+        bins = jnp.where(probs != 0, bin_size, 0)
         return -jax.scipy.special.rel_entr(probs, bins).sum() / jnp.log(base)
     else:
         return (jax.scipy.special.entr(probs)).sum() / jnp.log(base)
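For context on what the renamed argument does (not part of this commit): when every bin has the same width, the rel_entr branch above computes -sum p*log(p/bin_size) = -sum p*log p + log(bin_size), i.e. the discrete plug-in entropy plus a log(bin_size) correction, which is the usual histogram estimate of differential entropy. Below is a minimal sketch of that identity in nats, using only the entr/rel_entr calls visible in the diff; the probabilities and bin width are invented for illustration, not taken from hoi.

    # Sketch only, not the hoi implementation: assumes all bins share one width.
    import jax.numpy as jnp
    from jax.scipy.special import entr, rel_entr

    probs = jnp.array([0.1, 0.2, 0.3, 0.4])  # bin probabilities (sum to 1)
    bin_size = 0.5                            # common bin width (assumed)

    # Discrete plug-in entropy in nats: -sum p * log p
    h_discrete = entr(probs).sum()

    # Histogram estimate of differential entropy: -sum p * log(p / bin_size)
    bins = jnp.where(probs != 0, bin_size, 0.0)
    h_differential = -rel_entr(probs, bins).sum()

    # With equal-width bins the two differ exactly by log(bin_size)
    assert jnp.allclose(h_differential, h_discrete + jnp.log(bin_size))

entropy_bin itself additionally divides by jnp.log(base) to report the result in the requested base.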
hoi/utils/stats.py (4 changes: 2 additions & 2 deletions)
@@ -176,9 +176,9 @@ def digitize(x, n_bins, axis=0, use_sklearn=False, **kwargs):
     # in order to allow the histogram estimator, also the size of the bins is
     # returned.
     bins_arr = (x.max(axis=axis) - x.min(axis=axis)) / n_bins
-    b_siz = jnp.prod(bins_arr)
+    b_size = jnp.prod(bins_arr)
     if not use_sklearn:
-        return np.apply_along_axis(digitize_1d, axis, x, n_bins), b_siz
+        return np.apply_along_axis(digitize_1d, axis, x, n_bins), b_size
     else:
         kwargs["n_bins"] = n_bins
         kwargs["encode"] = "ordinal"
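The second file is where that bin size comes from. Below is a hypothetical usage sketch tying the two renamed names together; the import path and signature appear in the diff, but the data shape, the default axis, and the n_bins value are assumptions rather than documented hoi usage.

    # Hypothetical sketch, not taken from the hoi docs: the (n_samples, n_features)
    # layout is an assumption; only the function signature appears in the diff.
    import numpy as np
    from hoi.utils.stats import digitize

    x = np.random.randn(1000, 3)  # continuous data, assumed (n_samples, n_features)

    # With use_sklearn=False (the default), digitize returns the binned data and
    # the product of the per-dimension bin widths (the renamed b_size), which is
    # the value meant for entropy_bin's bin_size keyword.
    x_binned, bin_size = digitize(x, n_bins=8)

    print(x_binned.shape, float(bin_size))

From there the value would be forwarded as entropy_bin(..., bin_size=bin_size), matching the keyword renamed in the first file.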
