diff --git a/.gitlab-ci.hpc.yml b/.gitlab-ci.hpc.yml
deleted file mode 100644
index cd214c4..0000000
--- a/.gitlab-ci.hpc.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-include:
-  - project: hpc/gitlab-pipelines
-    file: spack-build.gitlab-ci.yml
-    ref: '$GITLAB_PIPELINES_BRANCH'
-
-variables:
-  SPACK_PACKAGE: spykfunc
-
-.fz_test:
-  extends: .spack_test
-  needs: [spack_build]
-  variables:
-    bb5_constraint: nvme
-    bb5_cpus_per_task: 72
-    bb5_memory: 0
-    bb5_ntasks: 1
-
-circuit1k_s2f:
-  extends: .fz_test
-
-circuit1k_s2f_split_and_merge:
-  extends: .fz_test
-
-circuit1k_s2s:
-  extends: .fz_test
-
-circuit1k_s2s_nodesets:
-  extends: .fz_test
-
-circuit2k_s2f:
-  extends: .fz_test
-
-circuit2k_s2s:
-  extends: .fz_test
diff --git a/.gitlab-ci.nse.yml b/.gitlab-ci.nse.yml
deleted file mode 100644
index c05bd6a..0000000
--- a/.gitlab-ci.nse.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-include:
-  - project: nse/ci
-    file:
-      - /ci/jobs/lint.yml
-  - project: hpc/gitlab-pipelines
-    file:
-      - tox-nse-docs.gitlab-ci.yml
-
-variables:
-  KUBERNETES_CPU_LIMIT: 4100m
-  KUBERNETES_CPU_REQUEST: 4
-  KUBERNETES_MEMORY_LIMIT: 8Gi
-  KUBERNETES_MEMORY_REQUEST: 8Gi
-
-.tox-template:
-  variables:
-    SYS_PACKAGES: cmake git java-11-openjdk ninja-build mpich-autoload mpich-devel
-    CC: /usr/lib64/mpich/bin/mpicc
-    CXX: /usr/lib64/mpich/bin/mpicxx
-    PRE_BUILD_COMMAND: "export CC=/usr/lib64/mpich/bin/mpicc CXX=/usr/lib64/mpich/bin/mpicxx"
-
-build-wheels:
-  parallel:
-    matrix:
-      - PY_VERSION: [py311]
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
deleted file mode 100644
index 4d28b70..0000000
--- a/.gitlab-ci.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-nse_stack:
-  stage: test
-  trigger:
-    include:
-      - local: .gitlab-ci.nse.yml
-    strategy: depend
-
-hpc_stack:
-  stage: test
-  trigger:
-    include:
-      - local: .gitlab-ci.hpc.yml
-    strategy: depend
-  variables:
-    GITLAB_PIPELINES_BRANCH: $GITLAB_PIPELINES_BRANCH
diff --git a/AUTHORS.txt b/AUTHORS.txt
index 0562350..3f7df0e 100644
--- a/AUTHORS.txt
+++ b/AUTHORS.txt
@@ -1,4 +1,5 @@
 Matthias Wolf
 Fernando Pereira
 Sergio Rivas Gomez
+Luc Dominic Grosheintz-Laval
 Tristan Carel
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..2dd02a9
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,5 @@
+FROM python:latest
+
+RUN apt-get update \
+ && apt-get install -y libopenmpi-dev
+RUN pip install /workspace
diff --git a/README.rst b/README.rst
index 8c199e8..ca38da8 100644
--- a/README.rst
+++ b/README.rst
@@ -1,3 +1,6 @@
+.. image:: doc/source/_static/banner.jpg
+   :alt: A nice banner for functionalizer
+
 Functionalizer
 ==============
 
@@ -10,9 +13,27 @@ To process the large quantities of data optimally, this software uses PySpark.
 Installation
 ------------
 
-For manual installation via `pip`, a compiler handling C++17 will be necessary.
-Otherwise, all `git` submodules should be checked out and `cmake` as well as `ninja` be
-installed.
+The easiest way to install `functionalizer` is via:
+
+.. code-block:: console
+
+   pip install functionalizer
+
+Due to a dependency on ``mpi4py``, an MPI implementation needs to be installed on the
+system used.  On Ubuntu, this can be achieved with:
+
+.. code-block:: console
+
+   apt-get install -y libopenmpi-dev
+
+For manual installation from sources via ``pip``, a compiler handling C++17 will be
+necessary.  Furthermore, all ``git`` submodules should be checked out:
+
+.. code-block:: console
+
+   gh repo clone BlueBrain/functionalizer -- --recursive --shallow-submodules
+   cd functionalizer
+   pip install .
 
 Spark and Hadoop should be installed and set up as runtime dependencies.
 
@@ -27,4 +48,12 @@ Where the final argument `edges.h5` may also be a directory of Parquet files.  W
 running on a cluster with multiple nodes, care should be taken that every rank occupies a
 whole node, Spark will then spread out across each node.
 
+Acknowledgment
+--------------
+The development of this software was supported by funding to the Blue Brain Project,
+a research center of the École polytechnique fédérale de Lausanne (EPFL),
+from the Swiss government's ETH Board of the Swiss Federal Institutes of Technology.
+
+Copyright (c) 2017-2024 Blue Brain Project/EPFL
+
 .. _SONATA extension: https://sonata-extension.readthedocs.io
diff --git a/doc/source/_static/banner.jpg b/doc/source/_static/banner.jpg
new file mode 100644
index 0000000..9ab45e4
Binary files /dev/null and b/doc/source/_static/banner.jpg differ
diff --git a/doc/source/conf.py b/doc/source/conf.py
index f74658b..8579f10 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -56,3 +56,4 @@
 html_theme_options = {"metadata_distribution": "functionalizer"}
 html_title = "functionalizer"
 html_show_sourcelink = False
+html_static_path = ['_static']
diff --git a/pyproject.toml b/pyproject.toml
index aac6192..a15e9f7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,12 +1,12 @@
 [build-system]
-requires = ["scikit-build-core[pyproject]", "setuptools_scm"]
+requires = ["scikit-build-core[pyproject]", "setuptools_scm", "cmake", "ninja"]
 build-backend = "scikit_build_core.build"
 
 [project]
 name = "functionalizer"
 dynamic = ["version"]
 description = "A PySpark implementation of the Blue Brain Project Functionalizer"
-license = {"text" = "Blue Brain Project License"}
+license = {"file" = "LICENSE.txt"}
 authors = [{"name" = "BlueBrain HPC", "email" = "bbp-ou-hpc@epfl.ch"}]
 maintainers = [
     {"name" = "Matthias Wolf", "email" = "matthias.wolf@epfl.ch"},
@@ -74,3 +74,12 @@ max-args = 8
 [tool.black]
 extend-exclude = 'deps\/.*$'
 line_length = 100
+
+[tool.cibuildwheel]
+skip = ["cp3{6,7,8}-*", "pp*", "*-win32", "*-manylinux_i686", "*-musllinux_i686", "*-musllinux_x86_64", "*-musllinux_aarch64"]
+# MPI needed for testing with mpi4py
+before-all = "yum install -y openmpi3-devel java-11-openjdk"
+environment = { MPICC="/usr/lib64/openmpi3/bin/mpicc" }
+# Remove setuptools with Spark v4 - v3 has implicit dependency on distutils
+test-requires = ["pytest", "mock", "setuptools"]
+test-command = "pytest --run-slow {project}/tests"