From 6de191a94cc2ffcd740616bce095bcbcc71d6e0c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 4 Dec 2023 13:37:12 +0000
Subject: [PATCH 1/5] Bump JamesIves/github-pages-deploy-action from 4.4.3 to 4.5.0

Bumps [JamesIves/github-pages-deploy-action](https://github.com/jamesives/github-pages-deploy-action) from 4.4.3 to 4.5.0.
- [Release notes](https://github.com/jamesives/github-pages-deploy-action/releases)
- [Commits](https://github.com/jamesives/github-pages-deploy-action/compare/v4.4.3...v4.5.0)

---
updated-dependencies:
- dependency-name: JamesIves/github-pages-deploy-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
---
 .github/workflows/test_doc.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test_doc.yml b/.github/workflows/test_doc.yml
index c6067373..a6a0b35a 100644
--- a/.github/workflows/test_doc.yml
+++ b/.github/workflows/test_doc.yml
@@ -59,7 +59,7 @@ jobs:
         touch _build/html/.nojekyll
 
       - name: Deploy Github Pages 🚀
-        uses: JamesIves/github-pages-deploy-action@v4.4.3
+        uses: JamesIves/github-pages-deploy-action@v4.5.0
        with:
          branch: gh-pages
          folder: docs/_build/html/

From 453a203f409ec68e119d0f90c7cb1ee8fedf78b6 Mon Sep 17 00:00:00 2001
From: Thomas Robiglio <83019028+thomasrobiglio@users.noreply.github.com>
Date: Wed, 6 Dec 2023 17:43:58 +0100
Subject: [PATCH 2/5] feat : added tutorial for core metrics (ent. and mi)

---
 examples/tutorials/tutorial_core.py | 73 +++++++++++++++++++++++++++++
 1 file changed, 73 insertions(+)
 create mode 100644 examples/tutorials/tutorial_core.py

diff --git a/examples/tutorials/tutorial_core.py b/examples/tutorials/tutorial_core.py
new file mode 100644
index 00000000..046879d3
--- /dev/null
+++ b/examples/tutorials/tutorial_core.py
@@ -0,0 +1,73 @@
+"""
+Core information theoretical metrics
+====================================
+
+This tutorial guides you through the core information theoretical metrics
+available. These metrics are the entropy and the mutual information.
+"""
+import numpy as np
+from hoi.core import get_entropy
+from hoi.core import get_mi
+
+###############################################################################
+# Entropy
+# -------
+#
+# The fundamental information theoretical metric is the entropy. Most of the
+# other higher-order metrics of information theory defined in HOI are based on
+# the entropy.
+#
+# In HOI there are 4 different methods to compute the entropy, in this tutorial 
+# we will use the estimation based on the Gaussian Copula estimation.
+#
+# Let's start by extracting a sample `x` from a multivariate Gaussian distribution
+# with zero mean and unit variance:
+
+D = 3
+x = np.random.normal(size=(D,1000));
+
+###############################################################################
+# Now we can compute the entropy of `x`. We use the function `get_entropy` to
+# build a callable function to compute the entropy. The function `get_entropy`
+# takes as input the method to use to compute the entropy. In this case we use
+# the Gaussian Copula estimation, so we set the method to `"gcmi"`:
+
+entropy = get_entropy(method="gcmi")
+
+###############################################################################
+# Now we can compute the entropy of `x` by calling the function `entropy`. This 
+# function takes as input an array of data of shape `(n_features, n_samples)`. For
+# the Gaussian Copula estimation, the entropy is computed in bits. We have:
+
+print("Entropy of x: %.2f" % entropy(x))
+
+###############################################################################
+# For comparison, we can compute the entropy of a multivariate Gaussian with 
+# the analytical formula, which is:
+# .. math::
+# H(X) = \frac{1}{2} \log \left( (2 \pi e)^D \det(\Sigma) \right)
+# where :math:`D` is the dimensionality of the Gaussian and :math:`\Sigma` is
+# the covariance matrix of the Gaussian. We have:
+
+C = np.cov(x, rowvar=True)
+entropy_analytical = (0.5*(np.log(np.linalg.det(C))+D*(1+np.log(2*np.pi))))/np.log(2)
+print("Analytical entropy of x: %.2f" % entropy_analytical)
+
+###############################################################################
+# We see that the two values are very close. 
+#
+# Mutual information
+# ------------------
+#
+# The mutual information is another fundamental information theoretical metric.
+# In this tutorial we will compute the mutual information between two variables
+# `x` and `y` extracted from a multivariate Gaussian distribution with zero mean
+# and unit variance. We show that since the two variables are independent, the
+# mutual information is very close to zero.
+
+D = 3
+x = np.random.normal(size=(D,1000))
+y = np.random.normal(size=(D,1000))
+
+mi = get_mi(method="gcmi")
+print("Mutual information between x and y: %.2f" % mi(x,y))
\ No newline at end of file

From 80f1b6a1a367278d73882e9d09c0e773ff119e33 Mon Sep 17 00:00:00 2001
From: Thomas Robiglio <83019028+thomasrobiglio@users.noreply.github.com>
Date: Wed, 6 Dec 2023 17:44:44 +0100
Subject: [PATCH 3/5] style : formatted using black

---
 examples/tutorials/tutorial_core.py | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/examples/tutorials/tutorial_core.py b/examples/tutorials/tutorial_core.py
index 046879d3..890f34c4 100644
--- a/examples/tutorials/tutorial_core.py
+++ b/examples/tutorials/tutorial_core.py
@@ -17,14 +17,14 @@
 # other higher-order metrics of information theory defined in HOI are based on
 # the entropy.
 #
-# In HOI there are 4 different methods to compute the entropy, in this tutorial 
+# In HOI there are 4 different methods to compute the entropy, in this tutorial
 # we will use the estimation based on the Gaussian Copula estimation.
 #
 # Let's start by extracting a sample `x` from a multivariate Gaussian distribution
 # with zero mean and unit variance:
 
 D = 3
-x = np.random.normal(size=(D,1000));
+x = np.random.normal(size=(D, 1000))
 
 ###############################################################################
 # Now we can compute the entropy of `x`. We use the function `get_entropy` to
@@ -35,14 +35,14 @@
 entropy = get_entropy(method="gcmi")
 
 ###############################################################################
-# Now we can compute the entropy of `x` by calling the function `entropy`. This 
+# Now we can compute the entropy of `x` by calling the function `entropy`. This
 # function takes as input an array of data of shape `(n_features, n_samples)`. For
 # the Gaussian Copula estimation, the entropy is computed in bits. We have:
 
 print("Entropy of x: %.2f" % entropy(x))
 
 ###############################################################################
-# For comparison, we can compute the entropy of a multivariate Gaussian with 
+# For comparison, we can compute the entropy of a multivariate Gaussian with
 # the analytical formula, which is:
 # .. math::
 # H(X) = \frac{1}{2} \log \left( (2 \pi e)^D \det(\Sigma) \right)
@@ -50,11 +50,13 @@
 # where :math:`D` is the dimensionality of the Gaussian and :math:`\Sigma` is
 # the covariance matrix of the Gaussian. We have:
 
 C = np.cov(x, rowvar=True)
-entropy_analytical = (0.5*(np.log(np.linalg.det(C))+D*(1+np.log(2*np.pi))))/np.log(2)
+entropy_analytical = (
+    0.5 * (np.log(np.linalg.det(C)) + D * (1 + np.log(2 * np.pi)))
+) / np.log(2)
 print("Analytical entropy of x: %.2f" % entropy_analytical)
 
 ###############################################################################
-# We see that the two values are very close. 
+# We see that the two values are very close.
 #
 # Mutual information
 # ------------------
@@ -66,8 +68,8 @@
 # mutual information is very close to zero.
 
 D = 3
-x = np.random.normal(size=(D,1000))
-y = np.random.normal(size=(D,1000))
+x = np.random.normal(size=(D, 1000))
+y = np.random.normal(size=(D, 1000))
 
 mi = get_mi(method="gcmi")
-print("Mutual information between x and y: %.2f" % mi(x,y))
\ No newline at end of file
+print("Mutual information between x and y: %.2f" % mi(x, y))

From 98f99bbfc620ac75c647218ce9d24638d900a8ac Mon Sep 17 00:00:00 2001
From: Thomas Robiglio <83019028+thomasrobiglio@users.noreply.github.com>
Date: Wed, 6 Dec 2023 17:50:46 +0100
Subject: [PATCH 4/5] fix : changed y to uniform distributed

---
 examples/tutorials/tutorial_core.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/examples/tutorials/tutorial_core.py b/examples/tutorials/tutorial_core.py
index 890f34c4..aa84f0a4 100644
--- a/examples/tutorials/tutorial_core.py
+++ b/examples/tutorials/tutorial_core.py
@@ -63,13 +63,14 @@
 #
 # The mutual information is another fundamental information theoretical metric.
 # In this tutorial we will compute the mutual information between two variables
-# `x` and `y` extracted from a multivariate Gaussian distribution with zero mean
-# and unit variance. We show that since the two variables are independent, the
-# mutual information is very close to zero.
+# `x` and `y`. `x` is a multivariate Gaussian with zero mean and unit variance,
+# while `y` is a multivariate uniform distribution in the interval :math:`[0,1]`.
+# Since the two variables are independent, the mutual information between them is expected
+# to be zero.
 
 D = 3
 x = np.random.normal(size=(D, 1000))
-y = np.random.normal(size=(D, 1000))
+y = np.random.rand(D,1000)
 
 mi = get_mi(method="gcmi")
 print("Mutual information between x and y: %.2f" % mi(x, y))

From ff45352bc227b8969c5fdc854e1b8132af7fb75a Mon Sep 17 00:00:00 2001
From: Thomas Robiglio <83019028+thomasrobiglio@users.noreply.github.com>
Date: Wed, 6 Dec 2023 17:57:36 +0100
Subject: [PATCH 5/5] style : code formatted with black

---
 examples/tutorials/tutorial_core.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/tutorials/tutorial_core.py b/examples/tutorials/tutorial_core.py
index aa84f0a4..ff83dd5c 100644
--- a/examples/tutorials/tutorial_core.py
+++ b/examples/tutorials/tutorial_core.py
@@ -70,7 +70,7 @@
 
 D = 3
 x = np.random.normal(size=(D, 1000))
-y = np.random.rand(D,1000)
+y = np.random.rand(D, 1000)
 
 mi = get_mi(method="gcmi")
 print("Mutual information between x and y: %.2f" % mi(x, y))
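
A quick follow-up sketch (not one of the commits above, and relying only on the
`hoi.core` calls already used in the tutorial): with the same `get_mi` estimator,
two dependent variables should give a clearly positive value, in contrast to the
near-zero result obtained for the independent `x` and `y`. The noisy-copy
construction of `y` and the 0.1 noise scale below are purely illustrative
choices, not part of the tutorial.

    import numpy as np
    from hoi.core import get_mi

    D = 3
    x = np.random.normal(size=(D, 1000))
    # y is a noisy copy of x, so the two variables are strongly dependent
    y = x + 0.1 * np.random.normal(size=(D, 1000))

    # build the Gaussian Copula MI estimator, as in the tutorial
    mi = get_mi(method="gcmi")
    print("Mutual information between x and y: %.2f" % mi(x, y))

The estimator call is identical to the tutorial's; only the way `y` is generated
changes.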