diff --git a/.gitignore b/.gitignore index 49c5e6e..f2c3b30 100644 --- a/.gitignore +++ b/.gitignore @@ -85,4 +85,10 @@ venv/ #tests alice -alice_notebooks/ \ No newline at end of file +alice_notebooks/ + +*.tif +*.npy + +src/tapenade/notebooks/demo_data/* +!src/tapenade/notebooks/demo_data/.keep \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in index f3155af..8b64547 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,7 @@ include LICENSE include README.md +recursive-include src/tapenade/notebooks *.ipynb +recursive-include src/tapenade/notebooks/demo_data .keep recursive-exclude * __pycache__ recursive-exclude * *.py[co] diff --git a/README.md b/README.md index 34548af..ff276e4 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ The pipeline is composed of the following methods: 5. **Spatial correlation analysis**: Computes a spatial correlation map between two continuous fields. 6. **Deformation tensors analysis**: Computes deformation tensors (inertia, true strain, etc.) from segmented objects. -All methods are explained in details in our Jupyter notebooks, which are available in the [notebooks](notebooks/) folder. +All methods are explained in details in our Jupyter notebooks, which are available in the [notebooks](src/tapenade/notebooks/) folder. ## Installation @@ -101,15 +101,15 @@ Though not mandatory, we also recommend running the inference with StarDist3D on The methods described above are available at the following locations: -1. **Spectral filtering**: [Notebook](notebooks/spectral_filtering_notebook.ipynb) -2. **Registration & fusion**: [Code](src/tapenade/reconstruction/_reconstruct.py), [Notebook](notebooks/registration_notebook.ipynb) -3. **Pre-processing**: This [script](src/tapenade/preprocessing/_preprocessing.py) gathers all preprocessing functions, [Notebook](notebooks/preprocessing_notebook.ipynb) -4. **Segmentation**: [Code](src/tapenade/segmentation/_segment.py), [Notebook](notebooks/segmentation_notebook.ipynb) -4. **Masked smoothing**: [Code](src/tapenade/preprocessing/_preprocessing.py) (it is one of the preprocessing function), [Notebook](notebooks/masked_gaussian_smoothing_notebook.ipynb) -5. **Spatial correlation analysis**: [Code](src/tapenade/analysis/spatial_correlation/_spatial_correlation_plotter.py), [Notebook](notebooks/spatial_correlation_analysis_notebook.ipynb) -6. **Deformation tensors analysis**: [Code](src/tapenade/analysis/deformation/deformation_quantification.py), [Notebook](notebooks/deformation_analysis_notebook.ipynb) +1. **Spectral filtering**: [Notebook](src/tapenade/notebooks/spectral_filtering_notebook.ipynb) +2. **Registration & fusion**: [Code](src/tapenade/reconstruction/_reconstruct.py), [Notebook](src/tapenade/notebooks/registration_notebook.ipynb) +3. **Pre-processing**: This [script](src/tapenade/preprocessing/_preprocessing.py) gathers all preprocessing functions, [Notebook](src/tapenade/notebooks/preprocessing_notebook.ipynb) +4. **Segmentation**: [Code](src/tapenade/segmentation/_segment.py), [Notebook](src/tapenade/notebooks/segmentation_notebook.ipynb) +4. **Masked smoothing**: [Code](src/tapenade/preprocessing/_preprocessing.py) (it is one of the preprocessing function), [Notebook](src/tapenade/notebooks/masked_gaussian_smoothing_notebook.ipynb) +5. **Spatial correlation analysis**: [Code](src/tapenade/analysis/spatial_correlation/_spatial_correlation_plotter.py), [Notebook](src/tapenade/notebooks/spatial_correlation_analysis_notebook.ipynb) +6. 
**Deformation tensors analysis**: [Code](src/tapenade/analysis/deformation/deformation_quantification.py), [Notebook](src/tapenade/notebooks/deformation_analysis_notebook.ipynb) -All methods are explained in details in our Jupyter notebooks, which are available in the [notebooks](notebooks/) folder. +All methods are explained in details in our Jupyter notebooks, which are available in the [notebooks](src/tapenade/notebooks/) folder. ## Complementary Napari plugins (for graphical user interfaces) diff --git a/notebooks/segmentation_notebook.ipynb b/notebooks/segmentation_notebook.ipynb deleted file mode 100644 index e0ba8fa..0000000 --- a/notebooks/segmentation_notebook.ipynb +++ /dev/null @@ -1,336 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "ed08f86b", - "metadata": {}, - "source": [ - "# Segmentation Notebook" - ] - }, - { - "cell_type": "markdown", - "id": "86ea7a4a", - "metadata": {}, - "source": [ - "### After clicking on a code cell, press \"Shift+Enter\" to run the code, or click on the \"Run\" button in the toolbar above.
\n", - "\n", - "### Replace \"...\" signs with the appropriate path to your data.\n", - "
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "96194952", - "metadata": {}, - "outputs": [], - "source": [ - "from tapenade.preprocessing import (\n", - " global_contrast_enhancement,\n", - " local_contrast_enhancement,\n", - ")\n", - "from tapenade.preprocessing._preprocessing import change_array_pixelsize\n", - "from tapenade.segmentation._segment import segment_stardist\n", - "from tapenade.preprocessing.segmentation_postprocessing import remove_small_objects\n", - "import numpy as np\n", - "import tifffile\n", - "import matplotlib.pyplot as plt\n", - "from skimage.transform import resize\n", - "from pathlib import Path\n", - "from skimage.measure import regionprops" - ] - }, - { - "cell_type": "markdown", - "id": "6f469a65", - "metadata": {}, - "source": [ - "Enter the path to your data and to stardist model.\n", - "\n", - "The mask is optional, in the case you use the local normalization method." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "54e5d627", - "metadata": {}, - "outputs": [], - "source": [ - "path_stardist_model = ... #should finish by /tapenade-stardist\n", - "\n", - "main_folder = ...\n", - "path_to_data = Path(main_folder) / ... #for example Path(main_folder) / \"data/image.tif\"\n", - "path_to_mask = Path(main_folder) / ... #for example Path(main_folder) / \"data/mask.tif\"\n", - "data = tifffile.imread(Path(path_to_data))\n", - "mask = tifffile.imread(Path(path_to_mask))" - ] - }, - { - "cell_type": "markdown", - "id": "643141b0", - "metadata": {}, - "source": [ - "### Quick pre-processing\n", - "If your image already has the appropriate size and is normalized between 0 and 1, you can skip this and directly go to the prediction.\n", - "\n", - "Our StarDist model is trained on isotropic images of voxel size (0.62,0.62,0.62) Β΅m/pix, which means it is trained to segment objects of the approximate size of 15pixels. Adjust your image size to match this pixel size and do not hesitate to try different sizes on a subset of your data to optimize the result.\n", - "\n", - "The image given to StarDist should be normalized between 0 and 1. You can either use our local contrast enhancement method, which can drastically improve perofrmance in deeper planes, or the classical global contrast enhancement." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5ca068d8", - "metadata": {}, - "outputs": [], - "source": [ - "array_pixelsize = ... 
#ZYX, for example (1,0.2,0.2)\n", - "network_pixelsize = (0.62,0.62,0.62) #adjust if oversegmentation or undersegmentation\n", - "data_iso = change_array_pixelsize(data,input_pixelsize=array_pixelsize, output_pixelsize=network_pixelsize)\n", - "mask_iso = change_array_pixelsize(mask,input_pixelsize=array_pixelsize, output_pixelsize=network_pixelsize,order=0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a6cf6eba-b485-4af7-af31-2da7a5eb9039", - "metadata": {}, - "outputs": [], - "source": [ - "data_normalized = local_contrast_enhancement(image=data_iso,box_size= 25, perc_low=1,perc_high=99,mask=mask_iso)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3baa4c69", - "metadata": {}, - "outputs": [], - "source": [ - "data_normalized = global_contrast_enhancement(image=data_iso, perc_low=1,perc_high=99)" - ] - }, - { - "cell_type": "markdown", - "id": "ab1bf08b", - "metadata": {}, - "source": [ - "Check the resulting image on Napari if installed" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "80ae83d8", - "metadata": {}, - "outputs": [], - "source": [ - "import napari\n", - "viewer=napari.Viewer()\n", - "viewer.add_image(data_iso,colormap='inferno')\n", - "viewer.add_image(data_normalized,colormap='inferno')\n", - "napari.run()" - ] - }, - { - "cell_type": "markdown", - "id": "6a497ece", - "metadata": {}, - "source": [ - "## Run the prediction using StarDist3D" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9c13606f", - "metadata": {}, - "outputs": [], - "source": [ - "labels = segment_stardist(data_normalized, path_stardist_model)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7d01b380", - "metadata": {}, - "outputs": [], - "source": [ - "labels_at_array_pixelsize = resize(\n", - " labels,\n", - " data.shape,\n", - " anti_aliasing=False,\n", - " order=0,\n", - " preserve_range=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "9d2a2834", - "metadata": {}, - "source": [ - "Check the result on napari" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7df6f7b6", - "metadata": {}, - "outputs": [], - "source": [ - "import napari\n", - "viewer=napari.Viewer()\n", - "viewer.add_image(data,colormap='inferno')\n", - "viewer.add_labels(labels_at_array_pixelsize)\n", - "napari.run()" - ] - }, - { - "cell_type": "markdown", - "id": "ffb2cc73", - "metadata": {}, - "source": [ - "Save if you are satisfied.\n", - "If not, you can adjust the pixel size or the normalization method." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "fbb9edb5-a77f-4e43-a135-d2b1ecec1e3a", - "metadata": {}, - "outputs": [], - "source": [ - "tifffile.imwrite(Path(main_folder)/'segmentation.tif',labels)" - ] - }, - { - "cell_type": "markdown", - "id": "15a9b05c", - "metadata": {}, - "source": [ - "## Post-processing\n", - "#### 1 - Apply mask to remove out-of-sample detections" - ] - }, - { - "cell_type": "markdown", - "id": "3e5fc305", - "metadata": {}, - "source": [ - "If some cells are detected outside of the image, you might want to delete what is outside your mask.\n", - "\n", - "To create a mask of your image, use the preprocessing notebook or the napari plugin napari-tapenade-processing" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "55d960ec-576e-43e8-b2b6-7d2e6aef0b94", - "metadata": {}, - "outputs": [], - "source": [ - "labels[mask_iso==0]=0\n", - "tifffile.imwrite(Path(main_folder)/\"labels_corrected.tif\",labels)" - ] - }, - { - "cell_type": "markdown", - "id": "cdfe7e0e", - "metadata": {}, - "source": [ - "#### 2 - Filter small volumes in the segmentation.\n", - "\n", - "First, plot the histogram of cell volumes to evaluate the threshold" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5548ca09", - "metadata": {}, - "outputs": [], - "source": [ - "props=regionprops(labels)\n", - "histo=[]\n", - "for prop in props :\n", - " histo.append(np.sum(prop.area))\n", - "\n", - "plt.hist(histo,bins=100)\n", - "plt.title('Histogram of cell volumes')" - ] - }, - { - "cell_type": "markdown", - "id": "efdc7a5a", - "metadata": {}, - "source": [ - "Then, remove the objects smaller than ```size_min```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ece95ad9", - "metadata": {}, - "outputs": [], - "source": [ - "#choose the size to filter\n", - "size_min =1000\n", - "labels_filtered = remove_small_objects(labels,size_min)\n", - "\n", - "print('Before filtering :',len(np.unique(labels)),'labels \\nAfter filtering :',len(np.unique(labels_filtered)),'labels')" - ] - }, - { - "cell_type": "markdown", - "id": "a1e38b25", - "metadata": {}, - "source": [ - "Visualize the result on napari" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "138b6256", - "metadata": {}, - "outputs": [], - "source": [ - "import napari\n", - "viewer=napari.Viewer()\n", - "viewer.add_image(data_normalized)\n", - "viewer.add_labels(labels,name='labels_not_filtered')\n", - "viewer.add_labels(labels_filtered,name='labels_filtered')\n", - "napari.run()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "env-tapenade", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/spatial_correlation_analysis_notebook.ipynb b/notebooks/spatial_correlation_analysis_notebook.ipynb deleted file mode 100644 index 92d8d82..0000000 --- a/notebooks/spatial_correlation_analysis_notebook.ipynb +++ /dev/null @@ -1,195 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Multiscale analysis of spatial correlation between 2 signals" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### After clicking on a code cell, press \"Shift+Enter\" to 
run the code, or click on the \"Run\" button in the toolbar above.
\n", - "\n", - "### Replace \"...\" signs with the appropriate path to your data.\n", - "
" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from tapenade.analysis.spatial_correlation import (\n", - " SpatialCorrelationPlotter\n", - ")\n", - "import tifffile" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1. Data loading" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Load your the two signals that you wish to study the correlation between. If raw image data is loaded, the correlation will be studied at the smallest scale (voxel). To study the correlation at larger scales, we recommend applying our masked gaussian filter method, as presented in the `masked_gaussian_smoothing` notebook." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "path_to_signal1 = ... # e.g T-Bra\n", - "signal1 = tifffile.imread(path_to_signal1)\n", - "\n", - "path_to_signal2 = ... # e.g Sox2\n", - "signal2 = tifffile.imread(path_to_signal2)\n", - "\n", - "# optional but highly recommended\n", - "path_to_mask = ...\n", - "mask = tifffile.imread(path_to_mask)\n", - "\n", - "# optional but recommended if the signals belong to individual instances (e.g nuclei)\n", - "path_to_labels = ... \n", - "labels = tifffile.imread(path_to_labels)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2. Initializing the plotter" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Initializing the plotter once allows you to test several parameters of the visualization without reprocessing the data." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "spatial_correlation_plotter = SpatialCorrelationPlotter(\n", - " quantity_X=signal1,\n", - " quantity_Y=signal2,\n", - " mask=mask, # optional\n", - " labels=labels # optional\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Plot the correlation:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fig, ax = spatial_correlation_plotter.get_heatmap_figure()\n", - "fig.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The function `get_heatmap_figure` returns a figure and axis object that can be used to further customize the plot. 
The figure can be saved to a file using the `savefig` method of the figure object.\n", - "\n", - "The function has many parameters that can be used to customize the plot:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Help on function get_heatmap_figure in module tapenade.analysis.spatial_correlation._spatial_correlation_plotter:\n", - "\n", - "get_heatmap_figure(self, bins: tuple = (40, 40), show_individual_cells: bool = False, show_linear_fit: bool = True, normalize_quantities: bool = False, extent_X: tuple = None, extent_Y: tuple = None, percentiles_X: tuple = (0, 100), percentiles_Y: tuple = (0, 100), log_scale_X: bool = False, log_scale_Y: bool = False, figsize: tuple = (7, 4), label_X: str = 'X', label_Y: str = 'Y', colormap: str = 'plasma', sample_fraction: float = 0.005, display_quadrants: bool = False, fig_ax_tuple: tuple = None)\n", - " Create a heatmap of the spatial correlation between two quantities X and Y.\n", - " \n", - " Parameters:\n", - " - bins: list of two integers, number of bins in each dimension\n", - " - show_individual_cells: bool, if True and if labels was specified in __init__,\n", - " individual cells will be displayed as a scatter plot on top of the heatmap.\n", - " - show_linear_fit: bool, if True, a linear fit will be displayed on the heatmap.\n", - " - normalize_quantities: bool, if True, the quantities will be normalized to have\n", - " zero mean and unit standard deviation.\n", - " - extent_X: tuple of two floats, the extent of the X axis\n", - " - extent_Y: tuple of two floats, the extent of the Y axis\n", - " - percentiles_X: tuple of two floats, only the values of X between these percentiles\n", - " will be considered\n", - " - percentiles_Y: tuple of two floats, the percentiles of Y between thse percentiles\n", - " will be considered\n", - " - log_scale_X: bool, if True, the X axis will be displayed in log scale\n", - " - log_scale_Y: bool, if True, the Y axis will be displayed in log scale\n", - " - figsize: tuple of two floats, the size of the figure\n", - " - label_X: str, the label of the X axis\n", - " - label_Y: str, the label of the Y axis\n", - " - colormap: str, the name of the colormap\n", - " - sample_fraction: float, if labels has not been specified in __init__, sample_fraction\n", - " represents the fraction of the total number of values of X and Y that will be\n", - " randomly selected to be used for the computation of the statistics.\n", - " - display_quadrants: bool, if True, quadrants will be displayed as vertical and horizontal\n", - " dashed lines at the Otsu thresholds of X and Y.\n", - " - fig_ax_tuple: tuple of matplotlib figure and axis, if not None, the heatmap will be\n", - " displayed on the provided figure and axis.\n", - " \n", - " Returns:\n", - " - fig: matplotlib figure\n", - " - ax: matplotlib axis\n", - "\n" - ] - } - ], - "source": [ - "help(SpatialCorrelationPlotter.get_heatmap_figure)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "napari-mine", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/spectral_filtering_notebook.ipynb 
b/notebooks/spectral_filtering_notebook.ipynb deleted file mode 100644 index 1b57f4a..0000000 --- a/notebooks/spectral_filtering_notebook.ipynb +++ /dev/null @@ -1,163 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "

πŸ—οΈ

\n", - "This notebook is still under construction !\n", - "

⚠️

\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "import tifffile\n", - "from glob import glob\n", - "from pathlib import Path\n", - "import os\n", - "from tqdm import tqdm" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Loading spectral patterns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "channels=4 #fill up here the number of channels\n", - "folder_calibration = ...\n", - "\n", - "species = np.array([1,2,3,4])\n", - "#species = np.array([1,2]) #for GFP and Alexa488\n", - "spectralpatterns=np.zeros((221,channels,species.shape[0]))\n", - "for i in species:\n", - " filepath = Path(folder_calibration) / f'species{i}_medfilt.npy'\n", - " with open(filepath, 'rb') as f2:\n", - " avgspectrum_i=np.load(f2)\n", - " if i==2:\n", - " spectralpatterns[:,:,i-1]=avgspectrum_i[:,:]\n", - " else:\n", - " spectralpatterns[:,:,i-1]=avgspectrum_i[:,:]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Create folder structure and get list of samples" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "folder_data =...\n", - "os.mkdir(os.path.join(folder_data, \"filt\"))\n", - "\n", - "samples=[]\n", - "paths = sorted(glob(rf'{folder_data}/*.tif'))\n", - "for path in paths :\n", - " samples.append(Path(path).stem)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Filtering" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "for ind in tqdm(samples):\n", - " img = tifffile.imread(Path(folder_data) / f'{ind}.tif')\n", - " img= np.array(img)\n", - "\n", - " image_filtered=(np.zeros_like(img)).astype(np.int16)\n", - "\n", - " for z in range(img.shape[0]):\n", - " Iavg_channels=np.mean(img[z,:,:,:],axis=(1,2))\n", - " D=np.diag(1/Iavg_channels)\n", - " specpatterns_z=np.zeros((species.shape[0],img.shape[1]))\n", - " for i in species:\n", - " specpatterns_z[i-1,:]=spectralpatterns[z,:,i-1]\n", - " w=np.linalg.inv(specpatterns_z@D@np.transpose(specpatterns_z))@specpatterns_z@D\n", - " for i in species:\n", - "\n", - " image_filtered[z,i-1,:,:]=sum([w[i-1,j]*img[z,j,:,:] for j in range(0,channels)])\n", - "\n", - " image_filtered[image_filtered<0]=0\n", - " image_filtered=image_filtered.astype(np.uint16)\n", - " tifffile.imwrite(Path(folder_data) / f'filt/{ind}_filt.tif', image_filtered,imagej=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Plots" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "z_to_plot=50\n", - "for ind in tqdm(samples):\n", - " img = tifffile.imread(Path(folder_data) / f'{ind}.tif')\n", - " fout, axarrout = plt.subplots(1,channels) \n", - "\n", - " for i in range(channels):\n", - " axarrout[i].imshow(img[z_to_plot,i,:,:])\n", - " print(np.mean(img[z_to_plot,i,:,:]))\n", - " fout2, axarrout2 = plt.subplots(1,channels)\n", - "\n", - " for i in range(channels):\n", - " axarrout2[i].imshow(image_filtered[z_to_plot,i,:,:])\n", - " print(np.mean(image_filtered[z_to_plot,i,:,:]))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "organoid-env", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - 
"file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/pyproject.toml b/pyproject.toml index 79c7e01..96ed55e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,3 +75,8 @@ exclude = [ target-version = "py310" fix = true +[tool.setuptools.package-data] +"tapenade.notebooks" = [ + "*.ipynb", + "demo_data/.keep", +] diff --git a/src/tapenade/__init__.py b/src/tapenade/__init__.py index e37b10b..36621ee 100644 --- a/src/tapenade/__init__.py +++ b/src/tapenade/__init__.py @@ -1 +1,3 @@ -__version__ = "0.0.18" \ No newline at end of file +__version__ = "0.0.18" +from .data import get_path_to_demo_folder +__all__ = ["get_path_to_demo_folder"] \ No newline at end of file diff --git a/src/tapenade/analysis/spatial_correlation/_spatial_correlation_plotter.py b/src/tapenade/analysis/spatial_correlation/_spatial_correlation_plotter.py index b002a13..e7ae52d 100644 --- a/src/tapenade/analysis/spatial_correlation/_spatial_correlation_plotter.py +++ b/src/tapenade/analysis/spatial_correlation/_spatial_correlation_plotter.py @@ -4,8 +4,85 @@ from matplotlib.colors import LinearSegmentedColormap from skimage.measure import regionprops from skimage.filters import threshold_otsu +from scipy.optimize import least_squares +from scipy.stats import linregress -import tapenade.utils as utils + +def filter_percentiles( + X, percentilesX: tuple = (1, 99), Y=None, percentilesY: tuple = None +): + + if Y is None: + + down, up = percentilesX + + percentile_down = np.percentile(X, down) + percentile_up = np.percentile(X, up) + + mask = np.logical_and(percentile_down < X, percentile_up > X) + + return X[mask] + + else: + + downX, upX = percentilesX + + if percentilesY is None: + downY, upY = percentilesX + else: + downY, upY = percentilesY + + percentile_downX = np.percentile(X, downX) + percentile_downY = np.percentile(Y, downY) + + percentile_upX = np.percentile(X, upX) + percentile_upY = np.percentile(Y, upY) + + maskX = np.logical_and(percentile_downX <= X, percentile_upX >= X) + maskY = np.logical_and(percentile_downY <= Y, percentile_upY >= Y) + + mask = np.logical_and(maskX, maskY) + + return X[mask], Y[mask] + + +def linear_fit( + x, + y, + robust: bool = False, + return_r2: bool = False, + robust_params_init: tuple = None, + robust_f_scale: float = None, +): + + if not robust: + res = linregress(x, y) + + if return_r2: + return res.intercept, res.slope, res.rvalue**2 + else: + return res.intercept, res.slope + + else: + + def f(params, x, y): + return params[0] + params[1] * x - y + + if robust_params_init is None: + robust_params_init = np.ones(2) + + res_robust = least_squares( + f, + robust_params_init, + args=(x, y), + loss="soft_l1", + f_scale=robust_f_scale, + ) + + if return_r2: + raise NotImplementedError + else: + return res_robust.x[0], res_robust.x[1] class SpatialCorrelationPlotter: @@ -138,7 +215,7 @@ def get_heatmap_figure( log_scale_X = False log_scale_Y = False - quantity_X, quantity_Y = utils.filter_percentiles( + quantity_X, quantity_Y = filter_percentiles( X=quantity_X, percentilesX=percentiles_X, Y=quantity_Y, @@ -312,7 +389,7 @@ def _add_linear_fit( quantity_Y: np.ndarray, xedges: np.ndarray, ): - intercept, slope, r2 = utils.linear_fit( + intercept, slope, r2 = linear_fit( x=quantity_X, y=quantity_Y, return_r2=True ) diff --git a/src/tapenade/data.py b/src/tapenade/data.py new file mode 100644 index 
0000000..60b6f6c --- /dev/null +++ b/src/tapenade/data.py @@ -0,0 +1,68 @@ +from __future__ import annotations + +import shutil +import tempfile +import urllib.request +from importlib import resources +from pathlib import Path +from typing import Iterable + +_SENTINELS: set[str] = {".keep", ".gitkeep", ".placeholder"} # files that *don’t* count as data + + +def _is_effectively_empty(path: Path, sentinels: Iterable[str]) -> bool: + """True if folder contains nothing except sentinel files.""" + try: + return all(p.name in sentinels for p in path.iterdir()) + except FileNotFoundError: + return True + + +def get_path_to_demo_folder() -> Path: + """ + Ensure `tapenade/notebooks/demo_data` contains data; download if still empty. + Returns + ------- + pathlib.Path pointing to the demo directory (guaranteed to exist). + """ + package = "tapenade" + subfolder = "notebooks/demo_data" + url = "https://zenodo.org/records/15815474/files/demo_data.zip?download=1" + sentinels = _SENTINELS + # ── locate a *writable* directory ──────────────────────────────── + try: + base = resources.files(package) # works for wheels *and* -e installs + data_dir = base / subfolder + except (ModuleNotFoundError, FileNotFoundError): + tmp_root = Path(tempfile.gettempdir()) / f"{package}_data" + tmp_root.mkdir(exist_ok=True) + data_dir = tmp_root + + # ── download if still empty ────────────────────────────────────── + if _is_effectively_empty(data_dir, sentinels): + print(f"πŸ”½ First run – downloading data into {data_dir} …") + tmp = data_dir / "payload.zip" + + urllib.request.urlretrieve(url, tmp) # <- simple, std-lib only + + shutil.unpack_archive(tmp, data_dir) + tmp.unlink() # remove archive after unpack + + nested = data_dir / "demo_data" + if nested.is_dir(): + for child in nested.iterdir(): + dest = data_dir / child.name # move up one level + if dest.exists(): + # overwrite files or merge dirs if they already exist + if dest.is_dir() and child.is_dir(): + shutil.rmtree(dest) + else: + dest.unlink() + child.rename(dest) + nested.rmdir() # remove the now-empty wrapper + + print("βœ… Data ready") + else: + print(f"βœ… Using existing data in {data_dir}") + + return data_dir \ No newline at end of file diff --git a/notebooks/deformation_analysis_notebook.ipynb b/src/tapenade/notebooks/deformation_analysis_notebook.ipynb similarity index 85% rename from notebooks/deformation_analysis_notebook.ipynb rename to src/tapenade/notebooks/deformation_analysis_notebook.ipynb index fdf65c0..b113a34 100644 --- a/notebooks/deformation_analysis_notebook.ipynb +++ b/src/tapenade/notebooks/deformation_analysis_notebook.ipynb @@ -19,11 +19,14 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ "from tapenade.preprocessing import masked_gaussian_smooth_sparse\n", + "from tapenade import get_path_to_demo_folder\n", + "from pathlib import Path\n", + "from tqdm import tqdm\n", "import tifffile\n", "import numpy as np\n", "from skimage.measure import regionprops\n", @@ -74,15 +77,13 @@ "metadata": {}, "outputs": [], "source": [ - "path_to_data = ...\n", - "path_to_mask = ...\n", - "path_to_labels = ...\n", + "path_to_data = get_path_to_demo_folder()\n", "\n", - "data = tifffile.imread(path_to_data)\n", - "mask = tifffile.imread(path_to_mask)\n", - "labels = tifffile.imread(path_to_labels)\n", + "data = tifffile.imread(path_to_data / 'dapi_isotropized.tif')\n", + "mask = tifffile.imread(path_to_data / 'mask_def_corr.tif')\n", + "labels = tifffile.imread(path_to_data / 
'labels_def_corr.tif')\n", "\n", - "pixelsize = np.array([..., ..., ...]) # pixelsize in um/pixel\n", + "pixelsize = np.array([0.621, 0.621, 0.621]) # pixelsize in um/pixel\n", "scale = 1/pixelsize" ] }, @@ -102,9 +103,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 55, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 5139/5139 [00:06<00:00, 841.33it/s] \n" + ] + } + ], "source": [ "# props is made of objects 'prop' with many morphological properties\n", "# like volume, centroid, etc...\n", @@ -120,7 +129,7 @@ "# store volumes to use later\n", "volumes = np.zeros(n_points)\n", "\n", - "for index_label, prop in enumerate(props):\n", + "for index_label, prop in enumerate(tqdm(props)):\n", " add_tensor_inertia(prop, scale=scale)\n", " add_true_strain_tensor(prop, scale=scale)\n", " volumes[index_label] = prop.area\n", @@ -150,7 +159,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 56, "metadata": {}, "outputs": [], "source": [ @@ -182,7 +191,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 57, "metadata": {}, "outputs": [], "source": [ @@ -221,7 +230,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 58, "metadata": {}, "outputs": [], "source": [ @@ -256,7 +265,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 59, "metadata": {}, "outputs": [], "source": [ @@ -289,7 +298,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 60, "metadata": {}, "outputs": [], "source": [ @@ -313,7 +322,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 61, "metadata": {}, "outputs": [], "source": [ @@ -323,14 +332,26 @@ "\n", " viewer.add_image(data, name='data')\n", " viewer.add_labels(labels, name='labels')\n", - " viewer.add_vectors(inertia_vectors, name='inertia_vectors',\n", - " properties={'angle': inertia_angles})\n", - " viewer.add_vectors(true_strain_vectors, name='true_strain_vectors',\n", - " properties={'angle': true_strain_angles})\n", - " viewer.add_vectors(inertia_vectors_grid, name='inertia_vectors_grid',\n", - " properties={'angle': inertia_angles_grid})\n", - " viewer.add_vectors(true_strain_vectors_grid, name='true_strain_vectors_grid',\n", - " properties={'angle': true_strain_angles_grid})\n", + " viewer.add_vectors(\n", + " inertia_vectors, name='inertia_vectors',\n", + " properties={'angle': inertia_angles},\n", + " length=100, edge_width=5, out_of_slice_display=True\n", + " )\n", + " viewer.add_vectors(\n", + " true_strain_vectors, name='true_strain_vectors',\n", + " properties={'angle': true_strain_angles},\n", + " length=300, edge_width=5, out_of_slice_display=True\n", + " )\n", + " viewer.add_vectors(\n", + " inertia_vectors_grid, name='inertia_vectors_grid',\n", + " properties={'angle': inertia_angles_grid},\n", + " length=100, edge_width=5, out_of_slice_display=True\n", + " )\n", + " viewer.add_vectors(\n", + " true_strain_vectors_grid, name='true_strain_vectors_grid',\n", + " properties={'angle': true_strain_angles_grid},\n", + " length=300, edge_width=5, out_of_slice_display=True\n", + " )\n", "\n", " viewer.grid.enabled = True\n", " viewer.grid.shape = (3,2)\n", @@ -355,7 +376,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.14" + "version": "3.10.16" } }, "nbformat": 4, diff --git a/src/tapenade/notebooks/demo_data/.keep 
b/src/tapenade/notebooks/demo_data/.keep new file mode 100644 index 0000000..e69de29 diff --git a/notebooks/masked_gaussian_smoothing_notebook.ipynb b/src/tapenade/notebooks/masked_gaussian_smoothing_notebook.ipynb similarity index 86% rename from notebooks/masked_gaussian_smoothing_notebook.ipynb rename to src/tapenade/notebooks/masked_gaussian_smoothing_notebook.ipynb index ec2bcd9..97bb751 100644 --- a/notebooks/masked_gaussian_smoothing_notebook.ipynb +++ b/src/tapenade/notebooks/masked_gaussian_smoothing_notebook.ipynb @@ -19,11 +19,13 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from tapenade.preprocessing import masked_gaussian_smoothing\n", + "from tapenade import get_path_to_demo_folder\n", + "from pathlib import Path\n", "import tifffile\n", "import matplotlib.pyplot as plt\n", "from skimage.measure import regionprops\n", @@ -66,8 +68,8 @@ "metadata": {}, "outputs": [], "source": [ - "path_to_image = ... # Replace \"...\" signs with the appropriate path to your data.\n", - "image = tifffile.imread(path_to_image)\n", + "path_to_data = get_path_to_demo_folder()\n", + "image = tifffile.imread(path_to_data / 'bra_isotropized.tif')\n", "mid_plane_ind = int(image.shape[0] // 2)" ] }, @@ -102,16 +104,9 @@ "metadata": {}, "outputs": [], "source": [ - "plt.imshow(image[mid_plane_ind], cmap='gray')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "plt.imshow(smoothed_image[mid_plane_ind], cmap='gray')" + "fig, axes = plt.subplots(1, 2)\n", + "axes[0].imshow(image[mid_plane_ind], cmap='gray')\n", + "axes[1].imshow(smoothed_image[mid_plane_ind], cmap='gray')" ] }, { @@ -141,8 +136,7 @@ "metadata": {}, "outputs": [], "source": [ - "path_to_mask = ... # Replace \"...\" signs with the appropriate path to your data.\n", - "mask = tifffile.imread(path_to_mask)" + "mask = tifffile.imread(path_to_data / 'mask_def_corr.tif')" ] }, { @@ -167,7 +161,11 @@ "metadata": {}, "outputs": [], "source": [ - "plt.imshow(smoothed_image[mid_plane_ind], cmap='gray')" + "fig, axes = plt.subplots(1,2)\n", + "axes[0].imshow(smoothed_image_masked[mid_plane_ind], cmap='gray')\n", + "diff = axes[1].imshow(smoothed_image[mid_plane_ind] - smoothed_image_masked[mid_plane_ind], cmap='RdBu')\n", + "fig.colorbar(diff, ax=axes[1], orientation='horizontal')\n", + "axes[1].set_title(\"Difference between smoothed and masked smoothed image\", fontsize=8)" ] }, { @@ -199,8 +197,7 @@ "metadata": {}, "outputs": [], "source": [ - "path_to_labels = ... 
# Replace \"...\" signs with the appropriate path to your data.\n", - "labels = tifffile.imread(path_to_labels)\n", + "labels = tifffile.imread(path_to_data / 'labels_def_corr.tif')\n", "\n", "mask_nuclei_instances = labels.astype(bool)" ] @@ -234,7 +231,14 @@ "metadata": {}, "outputs": [], "source": [ - "plt.imshow(smoothed_image_masked2[mid_plane_ind], cmap='gray')" + "# plt.imshow(smoothed_image_masked2[mid_plane_ind], cmap='gray')\n", + "fig, axes = plt.subplots(1,2)\n", + "axes[0].imshow(smoothed_image_masked2[mid_plane_ind], cmap='gray')\n", + "foo = smoothed_image_masked[mid_plane_ind]\n", + "foo[~mask_nuclei_instances[mid_plane_ind]] = 0\n", + "diff = axes[1].imshow(foo - smoothed_image_masked2[mid_plane_ind], cmap='RdBu', vmin=-300, vmax=300)\n", + "fig.colorbar(diff, ax=axes[1], orientation='horizontal')\n", + "axes[1].set_title(\"Difference between masked smoothed and\\n masked with labels smooth images\", fontsize=8)" ] }, { @@ -280,19 +284,6 @@ "Smart by loading a mask of the inside of the sample, and the labels of the object instances." ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "path_to_mask = ... # Replace \"...\" signs with the appropriate path to your data.\n", - "mask = tifffile.imread(path_to_mask)\n", - "\n", - "path_to_labels = ... # Replace \"...\" signs with the appropriate path to your data.\n", - "labels = tifffile.imread(path_to_labels)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -346,7 +337,9 @@ "metadata": {}, "outputs": [], "source": [ - "plt.imshow(object_density_field[mid_plane_ind], cmap='inferno')" + "im=plt.imshow(object_density_field[mid_plane_ind], cmap='inferno')\n", + "plt.title(\"Object density field smoothed (masked correction)\", fontsize=8)\n", + "fig.colorbar(im)" ] }, { @@ -398,7 +391,9 @@ "metadata": {}, "outputs": [], "source": [ - "plt.imshow(volume_fraction_field[mid_plane_ind], cmap='viridis')" + "im=plt.imshow(volume_fraction_field[mid_plane_ind], cmap='viridis')\n", + "plt.title(\"Volume fraction field smoothed (masked correction)\", fontsize=8)\n", + "fig.colorbar(im)" ] }, { @@ -437,19 +432,6 @@ "In this case, we will illustrate the method by computing a continuous field of object volume (e.g nuclei) from the segmented instances." ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "path_to_mask = ... # Replace \"...\" signs with the appropriate path to your data.\n", - "mask = tifffile.imread(path_to_mask)\n", - "\n", - "path_to_labels = ... 
# Replace \"...\" signs with the appropriate path to your data.\n", - "labels = tifffile.imread(path_to_labels)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -503,7 +485,9 @@ "metadata": {}, "outputs": [], "source": [ - "plt.imshow(smoothed_volume_field[mid_plane_ind], cmap='cividis')" + "im=plt.imshow(smoothed_volume_field[mid_plane_ind], cmap='cividis')\n", + "plt.title(\"Smoothed volume field (masked correction)\", fontsize=8)\n", + "fig.colorbar(im)" ] }, { @@ -520,7 +504,7 @@ " viewer.add_image(smoothed_volume_field, name='smoothed_volume_field', colormap='cividis')\n", "\n", " viewer.grid.enabled = True\n", - " viewer.reset_view()\n" + " viewer.reset_view()" ] } ], @@ -540,7 +524,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.14" + "version": "3.10.16" } }, "nbformat": 4, diff --git a/notebooks/preprocessing_notebook.ipynb b/src/tapenade/notebooks/preprocessing_notebook.ipynb similarity index 87% rename from notebooks/preprocessing_notebook.ipynb rename to src/tapenade/notebooks/preprocessing_notebook.ipynb index d61d5b4..f1c62f4 100644 --- a/notebooks/preprocessing_notebook.ipynb +++ b/src/tapenade/notebooks/preprocessing_notebook.ipynb @@ -31,6 +31,8 @@ " print(\"Napari is not installed. Some parts of the notebook will not be available.\")\n", " napari_available = False\n", "\n", + "from pathlib import Path\n", + "from tapenade import get_path_to_demo_folder\n", "from tapenade.preprocessing import (\n", " change_array_pixelsize,\n", " compute_mask,\n", @@ -38,9 +40,6 @@ " local_contrast_enhancement,\n", " align_array_major_axis,\n", " crop_array_using_mask\n", - ")\n", - "from tapenade.preprocessing.segmentation_postprocessing import (\n", - " remove_labels_outside_of_mask\n", ")" ] }, @@ -58,11 +57,13 @@ "metadata": {}, "source": [ "From a raw image, a classical preprocessing pipeline would go through the following steps:\n", - "1. **correcting for anisotropy**: dilate the image shape to make it isotropic.\n", - "2. **computing the mask**: compute a boolean (0/1) mask of background/foreground voxels.\n", - "3. **image contrast enhancement**: enhance the contrast of the image either globally or in local regions of the image to make it more homogeneous.\n", - "4. **image segmentation**: extract the objects of interest from the image, e.g with Stardist3D. ***NOT COVERED IN THIS NOTEBOOK***\n", - "5. **spatio-temporal registration**: correct for object drift, or fuse two images spatially. ***NOT COVERED IN THIS NOTEBOOK***\n", + "1. **spatio-temporal registration**: correct for object drift, or fuse two images spatially. ***NOT COVERED IN THIS NOTEBOOK***\n", + "2. **correcting for anisotropy**: dilate the image shape to make it isotropic.\n", + "3. **computing the mask**: compute a boolean (0/1) mask of background/foreground voxels.\n", + "4. **image contrast enhancement**: enhance the contrast of the image either globally or in local regions of the image to make it more homogeneous.\n", + "5. **image segmentation**: extract the objects of interest from the image, e.g with Stardist3D. ***NOT COVERED IN THIS NOTEBOOK***\n", + "\n", + "Secundary preprocessing steps:\n", "6. **forcing axis alignment**: specify axis to align the major axis of the objects to.\n", "7. 
**cropping array to mask**: crop any array (image, labels, or mask) to the smallest bounding box containing the mask.\n", "\n", @@ -109,9 +110,9 @@ "metadata": {}, "outputs": [], "source": [ - "path_to_data = ...\n", + "path_to_data = get_path_to_demo_folder()\n", "\n", - "data = tifffile.imread(path_to_data)\n", + "data = tifffile.imread(path_to_data / \"image.tif\")\n", "\n", "if display_in_napari:\n", " viewer = napari.view_image(data, name='raw data')" @@ -121,7 +122,21 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## 1. Correcting for anisotropy" + "## 1. Spatio-temporal registration" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As stated above, we do not cover the spatio-temporal registration step in this notebook. We refer the reader to the `registration` notebook provided with this package." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Correcting for anisotropy" ] }, { @@ -180,7 +195,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## 2. Computing the mask" + "## 3. Computing the mask" ] }, { @@ -199,11 +214,6 @@ "help(compute_mask)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [] - }, { "cell_type": "markdown", "metadata": {}, @@ -240,7 +250,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## 3. Image contrast enhancement" + "## 4. Image contrast enhancement" ] }, { @@ -297,7 +307,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# 4. Image segmentation" + "# 5. Image segmentation" ] }, { @@ -315,28 +325,12 @@ "metadata": {}, "outputs": [], "source": [ - "path_to_labels = ...\n", - "\n", - "labels = tifffile.imread(path_to_labels)\n", + "labels = tifffile.imread(path_to_data / \"labels.tif\")\n", "\n", "if display_in_napari:\n", " viewer.add_labels(labels, name='labels')" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 5. Spatio-temporal registration" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As stated above, we do not cover the spatio-temporal registration step in this notebook. We refer the reader to the `registration` notebook provided with this package." 
- ] - }, { "cell_type": "markdown", "metadata": {}, @@ -372,19 +366,19 @@ "align_labels = align_array_major_axis(\n", " target_axis='X', rotation_plane='XY', # -> align the major axis with the X axis\n", " mask=mask_snp, array=labels, order=0, # order 0 for labels and masks\n", - " temporal_slice=slice(2, 10) # -> use the frames from time 2 to 10 to compute the major axis\n", + " # temporal_slice=slice(2, 10) # -> use the frames from time 2 to 10 to compute the major axis\n", ")\n", "\n", "aligned_data = align_array_major_axis(\n", " target_axis='X', rotation_plane='XY', # -> align the major axis with the X axis\n", " mask=mask_snp, array=enhanced_data, order=1,\n", - " temporal_slice=slice(2, 10) # -> use the frames from time 2 to 10 to compute the major axis\n", + " # temporal_slice=slice(2, 10) # -> use the frames from time 2 to 10 to compute the major axis\n", ")\n", "\n", "aligned_mask = align_array_major_axis(\n", " target_axis='X', rotation_plane='XY', # -> align the major axis with the X axis\n", " mask=mask_snp, array=mask_snp, order=0, # order 0 for labels and masks\n", - " temporal_slice=slice(2, 10) # -> use the frames from time 2 to 10 to compute the major axis\n", + " # temporal_slice=slice(2, 10) # -> use the frames from time 2 to 10 to compute the major axis\n", ")\n", "\n", "if display_in_napari:\n", @@ -441,39 +435,6 @@ " viewer.add_image(cropped_data, name='cropped data')\n", " viewer.add_labels(cropped_labels, name='cropped labels')" ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Segmentation postprocessing" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 1. Removing labels outside of mask" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Due to the presence of noise in the image, the segmentation can sometimes produce labels that are not fully contained in the mask. We provide the function `remove_labels_outside_of_mask` to remove these labels. It takes as input the labels and the mask, and removes the labels that are not fully contained in the mask." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "labels_filtered = remove_labels_outside_of_mask(mask=cropped_mask, labels=cropped_labels)\n", - "\n", - "if display_in_napari:\n", - " viewer.add_labels(labels_filtered, name='labels filtered')" - ] } ], "metadata": { @@ -492,7 +453,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.14" + "version": "3.10.16" } }, "nbformat": 4, diff --git a/notebooks/registration_notebook.ipynb b/src/tapenade/notebooks/registration_notebook.ipynb similarity index 67% rename from notebooks/registration_notebook.ipynb rename to src/tapenade/notebooks/registration_notebook.ipynb index 963f00d..b35189c 100644 --- a/notebooks/registration_notebook.ipynb +++ b/src/tapenade/notebooks/registration_notebook.ipynb @@ -21,31 +21,25 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "0d08deec", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "c:\\Users\\gros\\Anaconda3\\envs\\env-tapenade\\lib\\site-packages\\IO\\IO.py:26: UserWarning: KLB library is not installed\n", - " warnings.warn(\"KLB library is not installed\")\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "pyklb library not found, klb files will not be generated\n" - ] - } - ], + "outputs": [], "source": [ "from tapenade import reconstruction\n", "import numpy as np\n", "import tifffile\n", - "from pathlib import Path" + "from pathlib import Path\n", + "from tapenade import get_path_to_demo_folder" + ] + }, + { + "cell_type": "markdown", + "id": "b438bb82", + "metadata": {}, + "source": [ + "### Install the registration package by following the user manual available on Tapenade's GitHub page\n", + "" ] }, { @@ -65,14 +59,20 @@ "metadata": {}, "outputs": [], "source": [ - "path_ref_positions= ...\n", - "path_float_positions= ...\n", + "path_to_data = get_path_to_demo_folder()\n", + "\n", + "path_ref_positions = path_to_data / '96h_bottom_multipoints.xml'\n", + "path_float_positions = path_to_data / '96h_top_multipoints.xml'\n", "\n", - "reconstruction.plot_positions(path_ref_positions=path_ref_positions,path_float_positions=path_float_positions)\n", - "ordered_numbers_ref,ordered_numbers_float=reconstruction.associate_positions(\n", - " path_ref_positions=path_ref_positions,\n", + "reconstruction.plot_positions(\n", + " path_ref_positions=path_ref_positions,\n", " path_float_positions=path_float_positions\n", ")\n", + "ordered_numbers_ref, ordered_numbers_float=reconstruction.associate_positions(\n", + " path_ref_positions=path_ref_positions,\n", + " path_float_positions=path_float_positions\n", + ")\n", + "\n", "print(ordered_numbers_ref,ordered_numbers_float)" ] }, @@ -83,7 +83,7 @@ "source": [ "## Paths and channels name\n", "\n", - "If you used the function associate_positions generate automatically the list_ref and list_float from the number paired above, bottom with top\n", + "If you used the function associate_positions, you can generate automatically the list_ref and list_float from the number paired above, bottom with top\n", "\n", "```list_ref = [\"{:01d}_view1\".format(i) for i in ordered_numbers_ref]```\n", "\n", @@ -92,15 +92,11 @@ "\n", "\n", "If you did not use the function associate_positions, you can define the list_ref and list_float manually,\n", - "for example if you have one sample and 2 views :\n", "\n", - "```list_ref=['view1']``` and 
```list_float=['view2']```\n", + "for example with 3 samples and 2 views :\n", "\n", - "or with 2 samples and 2 views :\n", + "```list_ref=['1_ref','2_ref','3_ref']``` and ```list_float=['1_float','2_float','3_float']```\n", "\n", - "```list_ref=['01','02']``` and ```list_float=['03','04']```\n", - "\n", - "with 01 and 03 the bottom and top views of sample 1\n", "\n" ] }, @@ -111,17 +107,16 @@ "metadata": {}, "outputs": [], "source": [ - "list_ref = []\n", - "list_float = []\n", + "# list_ref = [\"{:01d}_bottom\".format(i) for i in ordered_numbers_ref]\n", + "# list_float = [\"{:01d}_top\".format(i) for i in ordered_numbers_float]\n", + "list_ref = ['1_bottom']\n", + "list_float = ['5_top']\n", "channels = [\n", "\"hoechst\",\n", - "'ecad',\n", + "'ph3',\n", "'bra',\n", - "'sox2'\n", - "] # example of channels. If you have only one channel, just put one element in the list\n", - "\n", - "#path where you have your data saved\n", - "folder_experiment = ..." + "'ecad'\n", + "] # example of channels. If you have only one channel, just put one element in the list" ] }, { @@ -134,7 +129,7 @@ "\n", "Datatype should be int16, uint16 or float32, otherwise no output will be saved.\n", "\n", - "All files, reference and float, need to be in the same folder, folder_experiment.\n", + "All files, reference and float, need to be in the folder_experiment\n", "\n", "The cell below creates the folder structure necessary for the registration. \n" ] @@ -147,7 +142,7 @@ "outputs": [], "source": [ "reconstruction.create_folders(\n", - " folder_experiment= folder_experiment,\n", + " folder_experiment=path_to_data,\n", " list_ref=list_ref, list_float=list_float, channels=channels\n", ")" ] @@ -167,26 +162,32 @@ { "cell_type": "code", "execution_count": null, - "id": "5f5c2e08", + "id": "f1572b15", "metadata": {}, "outputs": [], "source": [ - "# from now on, we consider only one sample. If you have multiple samples, you can loop : list_ref[i]\n", - "i = 0 #index of sample in the list\n", + "i = 0\n", "filename_ref = list_ref[i]\n", "filename_float = list_float[i]\n", - "input_voxel = ... #voxel size of your input image (XYZ) for example [0.6,0.6,1]\n", - "output_voxel = ... #voxel size of the output registered image (XYZ), for example [1,1,1]\n", - "channel_reference = ... #name of the ubiquitous channel, for example 'hoechst'\n", - "\n", - "# #if you have a first idea of your tranformations (rotation, translation), you can input them here:\n", - "rot= ... #XYZ in degrees. (for example [180,0,0], which is a rotation of 180 degrees around the X axis, that we use when flipping the sample)\n", - "trans2= ... #translation applied after the rotation, XYZ in voxels. (for example [10,0,0])\n", - "\n", + "input_voxel = [0.3,0.3,2] #voxel size of your input image (XYZ)\n", + "output_voxel = [0.3,0.3,2] #voxel size of the output registered image (XYZ)\n", + "channel_reference = 'hoechst' #name of the ubiquitous channel\n", + "##if you have a first idea of your tranformations (rotation, translation), you can input them here:\n", + "rot = [180,0,0] #XYZ in degrees. 
([180,0,0] is a rotation of 180 degrees around the X axis, that we use when flipping the sample)\n", + "trans2 = [0,0,0] #translation applied after the rotation, XYZ in voxels.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5f5c2e08", + "metadata": {}, + "outputs": [], + "source": [ "reconstruction.register(\n", - " path_data=Path(folder_experiment) / filename_ref / \"raw\",\n", - " path_transformation=Path(folder_experiment) / filename_ref / \"trsf\",\n", - " path_registered_data=Path(folder_experiment) / filename_ref / \"registered\",\n", + " path_data=Path(path_to_data) / filename_ref / \"raw\",\n", + " path_transformation=Path(path_to_data) / filename_ref / \"trsf\",\n", + " path_registered_data=Path(path_to_data) / filename_ref / \"registered\",\n", " reference_image=f\"{filename_ref}_{channel_reference}.tif\",\n", " floating_image=f\"{filename_float}_{channel_reference}.tif\",\n", " input_voxel=input_voxel,\n", @@ -194,16 +195,16 @@ " rot=rot,\n", " trans2=trans2,\n", " # input_init_trsf_from_plugin=... #path of the json file saved from the plugin\n", - " save_json=Path(folder_experiment) / filename_ref, #to save all parameters\n", + " save_json=Path(path_to_data) / filename_ref, #to save all parameters\n", ")\n", "\n", "##applying the same transformation to the other channels\n", "for channel in channels :\n", " if channel != channel_reference:\n", " reconstruction.register(\n", - " path_data=Path(folder_experiment) / filename_ref / \"raw\",\n", - " path_transformation =Path(folder_experiment) / filename_ref / \"trsf\",\n", - " path_registered_data=Path(folder_experiment) / filename_ref / \"registered\",\n", + " path_data=Path(path_to_data) / filename_ref / \"raw\",\n", + " path_transformation =Path(path_to_data) / filename_ref / \"trsf\",\n", + " path_registered_data=Path(path_to_data) / filename_ref / \"registered\",\n", " reference_image=f\"{filename_ref}_{channel}.tif\",\n", " floating_image=f\"{filename_float}_{channel}.tif\",\n", " input_voxel=input_voxel,\n", @@ -223,19 +224,27 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "id": "5948e87d", "metadata": {}, "outputs": [], "source": [ - "channel=channel_reference #by default, we visualize using the reference channel but you can replace it here by : 'ecad', 'bra',...\n", - "scale = (output_voxel[2], output_voxel[1], output_voxel[0])\n", - "reconstruction.check_napari(\n", - " folder=Path(folder_experiment)/ f\"{filename_ref}\",\n", - " reference_image=f\"{filename_ref}_{channel}.tif\",\n", - " floating_image=f\"{filename_float}_{channel}.tif\",\n", - " scale=scale,\n", - ")" + "try:\n", + " import napari\n", + " napari_installed = True\n", + "except ImportError:\n", + " print(\"napari is not installed, skipping visualization step.\")\n", + " napari_installed = False\n", + "\n", + "if napari_installed:\n", + " channel = channel_reference #by default, we visualize using the reference channel but you can replace it here by any other channel\n", + " scale = (output_voxel[2], output_voxel[1], output_voxel[0])\n", + " reconstruction.check_napari(\n", + " folder=Path(path_to_data)/ f\"{filename_ref}\",\n", + " reference_image=f\"{filename_ref}_{channel}.tif\",\n", + " floating_image=f\"{filename_float}_{channel}.tif\",\n", + " scale=scale,\n", + " )" ] }, { @@ -257,19 +266,15 @@ "metadata": {}, "outputs": [], "source": [ - "# from now on, we consider only one sample. 
If you have multiple samples, you can loop : list_ref[i]\n", - "i = 0 #index of sample\n", - "filename_ref = list_ref[i]\n", - "filename_float = list_float[i]\n", - "\n", "for ch in channels:\n", " image = reconstruction.fuse_sides(\n", - " folder=Path(folder_experiment) / filename_ref,\n", - " reference_image=f\"{filename_ref}_{ch}.tif\",\n", + " folder=Path(path_to_data) / filename_ref,\n", + " reference_image = f\"{filename_ref}_{ch}.tif\",\n", " floating_image = f\"{filename_float}_{ch}.tif\",\n", - " folder_output=Path(folder_experiment) / filename_ref / \"fused\",\n", - " input_voxel=input_voxel,\n", - " output_voxel=output_voxel,\n", + " folder_output = Path(path_to_data) / filename_ref / \"fused\",\n", + " name_output = f\"fusion_{ch}.tif\",\n", + " input_voxel = input_voxel,\n", + " output_voxel = output_voxel,\n", " )\n", "\n", "#the result is saved channel by channel in the folder 'fused'." @@ -290,14 +295,9 @@ "metadata": {}, "outputs": [], "source": [ - "i = 0 #index of sample\n", - "filename_ref = list_ref[i]\n", - "filename_float = list_float[i]\n", - "\n", - "# the images should be named 'sampleid_channel.tif', eg 'fuseddata_dapi.tif', this depends on the argument \"name_output\" above.\n", "reconstruction.write_hyperstacks(\n", - " path=Path(folder_experiment) / filename_ref / \"fused\",\n", - " sample_id=\"fused_data\",\n", + " path=Path(path_to_data) / filename_ref / \"fused\",\n", + " sample_id=\"fusion\",\n", " channels=channels\n", ")\n", "#the result is saved under the name 'sample_id'_registered.tif in the folder 'fused'." @@ -374,7 +374,7 @@ ], "metadata": { "kernelspec": { - "display_name": "env-tapenade", + "display_name": "napari-mine", "language": "python", "name": "python3" }, @@ -388,7 +388,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.14" + "version": "3.10.16" } }, "nbformat": 4, diff --git a/src/tapenade/notebooks/segmentation_notebook.ipynb b/src/tapenade/notebooks/segmentation_notebook.ipynb new file mode 100644 index 0000000..111de79 --- /dev/null +++ b/src/tapenade/notebooks/segmentation_notebook.ipynb @@ -0,0 +1,327 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "ed08f86b", + "metadata": {}, + "source": [ + "# Segmentation Notebook" + ] + }, + { + "cell_type": "markdown", + "id": "86ea7a4a", + "metadata": {}, + "source": [ + "### After clicking on a code cell, press \"Shift+Enter\" to run the code, or click on the \"Run\" button in the toolbar above.
\n", + "\n", + "### Replace \"...\" signs with the appropriate path to your data.\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "96194952", + "metadata": {}, + "outputs": [], + "source": [ + "# from tapenade import get_path_to_demo_folder\n", + "from tapenade.preprocessing import (\n", + " global_contrast_enhancement,\n", + " local_contrast_enhancement,\n", + " segment_stardist\n", + ")\n", + "from tapenade import get_path_to_demo_folder\n", + "from tapenade.preprocessing import change_array_pixelsize\n", + "from tapenade.preprocessing.segmentation_postprocessing import remove_small_objects, remove_labels_outside_of_mask\n", + "import numpy as np\n", + "import tifffile\n", + "import matplotlib.pyplot as plt\n", + "import skimage\n", + "from pathlib import Path\n", + "from skimage.measure import regionprops" + ] + }, + { + "cell_type": "markdown", + "id": "6f469a65", + "metadata": {}, + "source": [ + "Enter the path to the image you want to segment. The mask is optional, but it is recommended if you use the local contrast enhancement method (and do not load data that is already enhanced that way) or if you need to post-process the segmentation results (e.g to remove labels outside the mask or touching the borders)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "54e5d627", + "metadata": {}, + "outputs": [], + "source": [ + "path_to_data = get_path_to_demo_folder()\n", + "\n", + "data = tifffile.imread(path_to_data / \"image_isotropized_enhanced.tif\")\n", + "mask = tifffile.imread(path_to_data / \"mask_isotropized.tif\") # optional, read text above" + ] + }, + { + "cell_type": "markdown", + "id": "643141b0", + "metadata": {}, + "source": [ + "## Quick note about pre-processing\n", + "\n", + "The StarDist model we provide works best with roundish objects of approximately 15 pixels in diameters in all directions, and with images that have been normalized between 0 and 1.\n", + "\n", + "If your image already has the appropriate size and is normalized between 0 and 1, you can skip this and directly go to the prediction.\n", + "\n", + "To prepare your data, we recommend using the preprocessing notebook, particularly the following steps:\n", + " - Adapting the image pixel size so that all objects are approximately 15 pixels in diameters in all directions using the `change_array_pixelsize` function. In our case, this meant resizing to the isotropic voxel size of (0.62, 0.62, 0.62) Β΅m/pix. Do not hesitate to try different sizes on a subset of your data to optimize the result.\n", + " - Normalizing the image values using `global_contrast_enhancement` or `local_contrast_enhancement`. Use the latter if your image is very deep, as it can improve performance in deeper planes by enhancing the contrast using local statistics." + ] + }, + { + "cell_type": "markdown", + "id": "49ac4b72", + "metadata": {}, + "source": [ + "For the purpose of this demo, the data we load has already been pre-processed, but in your case, you can go through the preprocessing notebook to prepare your data, or uncomment the following lines to run the pre-processing steps directly in this notebook if you are familiar with the functions already." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "76260a1b", + "metadata": {}, + "outputs": [], + "source": [ + "# data_isotropized = change_array_pixelsize(\n", + "# data,\n", + "# input_pixelsize=..., # replace with the input pixelsize in ZYX order, e.g. 
(1, 0.5, 0.5)\n", + "# output_pixelsize=(0.621, 0.621, 0.621), # isotropic pixelsize\n", + "# order=1, # interpolation order, 1 for images, 0 for masks and labels\n", + "# )\n", + "# mask_isotropized = change_array_pixelsize(\n", + "# mask,\n", + "# input_pixelsize=..., # replace with the input pixelsize in ZYX order, e.g. (1, 0.5, 0.5)\n", + "# output_pixelsize=(0.621, 0.621, 0.621), # isotropic pixelsize\n", + "# order=0, # interpolation order, 1 for images, 0 for masks and labels\n", + "# )\n", + "\n", + "# data_isotropized_normalized = local_contrast_enhancement(\n", + "# data_isotropized,\n", + "# mask=mask_isotropized,\n", + "# box_size=10,\n", + "# perc_low=1, perc_high=99\n", + "# )\n", + "\n", + "data_isotropized_normalized = data # demo data is already preprocessed\n", + "mask_isotropized = mask" + ] + }, + { + "cell_type": "markdown", + "id": "6a497ece", + "metadata": {}, + "source": [ + "## Run the prediction using StarDist3D" + ] + }, + { + "cell_type": "markdown", + "id": "fbf85050", + "metadata": {}, + "source": [ + "We provide the function `segment_stardist`, which can be used to detect nuclei in 3D images using the StarDist model. The folder containing the weights and config is automatically downloaded in this demo, and can also be found [here](https://zenodo.org/records/14748083)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9c13606f", + "metadata": {}, + "outputs": [], + "source": [ + "path_stardist_model = Path(path_to_data / \"tapenade_stardist\") # folder containing weights\n", + "\n", + "labels = segment_stardist(\n", + " data_isotropized_normalized, # data already preprocessed\n", + " path_stardist_model,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "6e231d75", + "metadata": {}, + "source": [ + "If you want to resize the labels back to the original pixel size, you can use the function `resize` from the package `skimage.transform`. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7d01b380", + "metadata": {}, + "outputs": [], + "source": [ + "labels_at_array_pixelsize = skimage.transform.resize(\n", + " labels,\n", + " data.shape,\n", + " anti_aliasing=False,\n", + " order=0,\n", + " preserve_range=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "9d2a2834", + "metadata": {}, + "source": [ + "If you want to save the results, you can use the `tifffile.imwrite` function." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fbb9edb5-a77f-4e43-a135-d2b1ecec1e3a", + "metadata": {}, + "outputs": [], + "source": [ + "tifffile.imwrite(..., labels) # replace ... with the path where you want to save the labels as a tif file" + ] + }, + { + "cell_type": "markdown", + "id": "ad967338", + "metadata": {}, + "source": [ + "## Check results (napari required)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7df6f7b6", + "metadata": {}, + "outputs": [], + "source": [ + "try:\n", + " import napari\n", + " viewer=napari.Viewer()\n", + " viewer.add_image(data_isotropized_normalized, colormap='inferno')\n", + " viewer.add_labels(labels)\n", + " napari.run()\n", + "except ImportError:\n", + " print(\"Napari is not installed, skipping visualization.\")" + ] + }, + { + "cell_type": "markdown", + "id": "15a9b05c", + "metadata": {}, + "source": [ + "## Post-processing" + ] + }, + { + "cell_type": "markdown", + "id": "d2da84d5", + "metadata": {}, + "source": [ + "### 1. 
Removing labels outside of mask (inside/outside of the tissue)" + ] + }, + { + "cell_type": "markdown", + "id": "cca10224", + "metadata": {}, + "source": [ + "Due to the presence of noise in the image, the segmentation can sometimes produce labels that are not fully contained in the mask. We provide the function `remove_labels_outside_of_mask` to remove these labels. It takes as input the labels and the mask, and removes the labels that are not fully contained in the mask.\n", + "\n", + "To create a mask of your image, use the preprocessing notebook or the napari plugin napari-tapenade-processing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ab3679bf", + "metadata": {}, + "outputs": [], + "source": [ + "labels_filtered = remove_labels_outside_of_mask(\n", + " mask=mask_isotropized, \n", + " labels=labels\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "cdfe7e0e", + "metadata": {}, + "source": [ + "### 2 - Filter small volumes in the segmentation.\n", + "\n", + "First, plot the histogram of cell volumes to evaluate the threshold" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5548ca09", + "metadata": {}, + "outputs": [], + "source": [ + "props=regionprops(labels)\n", + "volumes = np.array([prop.area for prop in props])\n", + "\n", + "plt.hist(volumes, bins=100)\n", + "plt.title('Histogram of cell volumes')" + ] + }, + { + "cell_type": "markdown", + "id": "efdc7a5a", + "metadata": {}, + "source": [ + "Then, remove the objects smaller than ```size_min```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ece95ad9", + "metadata": {}, + "outputs": [], + "source": [ + "#choose the size to filter\n", + "size_min = 400\n", + "labels_filtered = remove_small_objects(labels,size_min)\n", + "\n", + "print('Before filtering :',len(np.unique(labels)),'labels \\nAfter filtering :',len(np.unique(labels_filtered)),'labels')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "napari-mine", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/tapenade/notebooks/spatial_correlation_analysis_notebook.ipynb b/src/tapenade/notebooks/spatial_correlation_analysis_notebook.ipynb new file mode 100644 index 0000000..7bd96ad --- /dev/null +++ b/src/tapenade/notebooks/spatial_correlation_analysis_notebook.ipynb @@ -0,0 +1,213 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Multiscale analysis of spatial correlation between 2 signals" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### After clicking on a code cell, press \"Shift+Enter\" to run the code, or click on the \"Run\" button in the toolbar above.
\n", + "\n", + "### Replace \"...\" signs with the appropriate path to your data.\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from tapenade.analysis.spatial_correlation import (\n", + " SpatialCorrelationPlotter\n", + ")\n", + "from tapenade import get_path_to_demo_folder\n", + "from pathlib import Path\n", + "import tifffile" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1. Data loading" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Load your the two signals that you wish to study the correlation between. If raw image data is loaded, the correlation will be studied at the smallest scale (voxel). To study the correlation at larger scales, we recommend applying our masked gaussian filter method, as presented in the `masked_gaussian_smoothing` notebook." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "path_to_data = get_path_to_demo_folder()\n", + "\n", + "signal1 = tifffile.imread(path_to_data / \"dapi_isotropized.tif\")\n", + "signal2 = tifffile.imread(path_to_data / \"bra_isotropized.tif\")\n", + "\n", + "# optional but highly recommended\n", + "mask = tifffile.imread(path_to_data / \"mask_def_corr.tif\")\n", + "\n", + "# optional but recommended if the signals belong to individual instances (e.g nuclei)\n", + "labels = tifffile.imread(path_to_data / \"labels_def_corr.tif\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2. Initializing the plotter" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Initializing the plotter once allows you to test several parameters of the visualization without reprocessing the data." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "spatial_correlation_plotter = SpatialCorrelationPlotter(\n", + " quantity_X=signal1,\n", + " quantity_Y=signal2,\n", + " mask=mask, # optional\n", + " labels=labels # optional\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Plot the correlation:" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_85973/16992269.py:2: UserWarning: FigureCanvasAgg is non-interactive, and thus cannot be shown\n", + " fig.show()\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAApwAAAGGCAYAAADW7lHFAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAd7dJREFUeJzt3Xl8TFf/B/DPnckqySQSshHEUmsstSalaFOhqpRHUWqp0gW19EGppVQbO7VXi/CUUi3aahslKCXUXlRTIRVbQpGVJJOZ8/tjan7GTCJ3zJp83s/r/n7NPeee+c41Sb45997vkYQQAkREREREVqKwdwBEREREVLox4SQiIiIiq2LCSURERERWxYSTiIiIiKyKCScRERERWRUTTiIiIiKyKiacRERERGRVTDiJiIiIyKqYcBIRERGRVTHhJCIiIiKrYsL5gH379qFLly4IDQ2FJEnYtm2brOOTkpLQvn17BAUFwcPDA9WrV8ekSZOgVqutEzARERGRE3CxdwCOJDc3F40aNcJrr72G7t27yz7e1dUV/fv3x5NPPgk/Pz+cOnUKQ4YMgVarxccff2yFiImIiIgcHxPOB3Tq1AmdOnUqsj0/Px/vv/8+vvzyS2RkZKBBgwaYNWsW2rVrBwCoXr06qlevru9ftWpV7N27F/v377d26EREREQOi5fUZRg+fDgSExOxceNG/P777+jZsyc6duyI8+fPm+yfnJyM+Ph4tG3b1saREhERETkOSQgh7B2EI5IkCVu3bkW3bt0AAKmpqahevTpSU1MRGhqq7xcdHY0WLVoYXDKPiorC8ePHkZ+fj6FDh2L58uVQKJjbExERUdnES+oldPr0aWg0GjzxxBMG+/Pz8xEQEGCwb9OmTcjOzsapU6cwduxYzJ07F+PGjbNluEREREQOgwlnCeXk5ECpVOLYsWNQKpUGbd7e3gZfh4WFAQDq1asHjUaDoUOH4t133zU6joiIiKgsYMJZQk2aNIFGo8GNGzfQpk2bEh+n1WqhVquh1WqZcBIREVGZxITzATk5OUhOTtZ/nZKSgpMnT8Lf3x9PPPEE+vbti/79+2PevHlo0qQJbt68iYSEBDRs2BCdO3fG+vXr4erqioiICLi7u+Po0aOYMGECevXqBVdXVzu+MyIiIiL74UNDD9i7dy/at29vtH/AgAGIi4uDWq3GjBkzsG7dOly9ehUVKlRAq1atMG3aNERERGDTpk2YPXs2/vrrLwghULVqVfTr1w+jR4+Gh4eHHd4RERERkf0x4SQiIiIiq2KtHiIiIiKyKiacRERERGRVZf6hIa1Wi2vXrsHHxweSJNk7HCIiIjJBCIHs7GyEhobaZTGVvLw8FBQUmHWsm5tbmX+Wo8wnnNeuXdPXzSQiIiLHdvnyZVSuXNmmr5mXl4dq4d5IT9OYdXxwcDBSUlLKdNJZ5hNOHx8fALoPsEqlsnM0REREVpKVBUyYAHzxhen2F18EFiwAKlSwbVwllJWVhbCwMP3vbVsqKChAepoGZ89Xg49K3uxqdpYW9Wv9jYKCAiacZdn9y+gqlYoJJxERlU67dwODBgGpqcZt5csDS5cCvXsDTnBrmT1vf/NRKaCSmXCSTplPOImIiEqt3FzgvfeAJUtMtz//PPDZZ0BoqG3jclKSFpC08hJeSWulYJwME04iIqLS6OBBYMAA4IEV9PS8vYGFC4HXXnOKWU2HISTdJvcYYsJJRERUquTnA1OnAnPmAFoT02vt2wOrVwPVqtk8NGcnaSUzZjiZcAJMOImIiEqP48eB/v2Bs2eN2zw9gVmzgGHDADuUFSoNdJfU5R9DTDiJiJxeeY+ZsvrfyXvPSpFYn5z36szvUza1GoiNBT78ECgsNG5v1QpYuxZ44gnbx1aaaP/d5B5DTDiJiIic2h9/6GY1jx0zbnNzA6ZPB/77X0CptH1sRP9iwklEROSMNBpd3cxJk3T3bT6scWNg3TogIsLmoZVWktBtco8hJpxERETOJzkZGDgQOHDAuE2pBCZO1CWibm42D600k4QZ93Ay4QTAhJOIiMh5aLXAihXA2LHA3bvG7XXr6u7VbN7c9rGVBVqh2+QeQ0w4iYiInMLly7q6mbt2GbdJEjBmjO6hIU9P28dWRvCSuvmYcBIRETkyIXSzliNH6tZDf1j16kBcHNCmjc1DK3P4lLrZmHASETm5slT+pyy9VwBAWhrwxhvAd9+Zbn/rLWD2bN3KQUQOjAknERGRI9q8WZdQ3rpl3Fa5MrBqFdChg+3jKsMkrYAk855Muf1LKy41QERE5Ehu3QL69AFeftl0sjlgAHD6NJNNe9CauRFnOImIiBzG9u3AkCG6S+kPCwwEVq4Euna1fVwEgA8NPQ7OcBIREdlbZiYweDDQpYvpZPM//9Gtj85k0744w2k2znASERHZU0KCrtxRaqpxW/nywLJlQK9eutJHZFeS1ozC70w4AXCGk4iIyD5yc4ERI4DoaNPJZufOwJkzQO/eTDbJ6XGGk4iIyNYOHtQ9/JOcbNzm4wMsXAgMGsRE09EI6Oqiyj2GmHASERHZTF4eMHUqMHeubpnKh7VvD6xZA1StavvY6JG4lrr5mHASERHZwvHjQP/+uod/HubpCcyaBQwbBih4t5vD4kpDZnOoT/Xy5cvRsGFDqFQqqFQqREZG4qeffiqyf1xcHCRJMtg8PDxsGDEREdEjqNXAtGlAy5amk81WrYCTJ3X3czLZdGj3yyLJ3cjBZjgrV66MmTNnolatWhBCYO3atejatStOnDiB+vXrmzxGpVIhKSlJ/7XE+12IiMhRnD2rm9U8fty4zc0NmD4d+O9/AaXS9rGRfJzhNJtDJZxdunQx+Pqjjz7C8uXLcejQoSITTkmSEBwcbIvwiIiISkajAebPByZNAgoKjNubNAHWrgUiImwfG5EdOOzcvUajwcaNG5Gbm4vIyMgi++Xk5KBq1aoICwtD165dcdbU5YoH5OfnIysry2AjIiKymORkoG1bYNw442RTqQSmTAEOHWKy6YxY+N1sDpdwnj59Gt7e3nB3d8ebb76JrVu3ol69eib71q5dG6tXr8a3336LL774AlqtFlFRUbhy5UqR48fGxsLX11e/hYWFWeutEBFRWaLVAkuXAo0aAQcOGLfXratLNKdN011OJ6ejuydTkrnZO2rHIAkht6CUdRUUFCA1NRWZmZn4+uuv8fnnn+OXX34pMul8kFqtRt26ddGnTx98+OGHJvvk5+cjPz9f/3VWVhbCwsKQmZkJlUplsfdBRGSu8h4zZfW/k/eelSKhEktN1S1NuWuXcZskAe++C3z4IcAHW82WlZUFX19fu/y+vv/at3+tDpW3vPtts3I08G99scznGQ51DycAuLm5oWbNmgCApk2b4siRI/jkk0/w6aefPvJYV1dXNGnSBMmmCun+y93dHe7u7haLl4iIyjAhdPdijhwJmLpFq3p1XXvr1raPjSyPDw2ZzeEuqT9Mq9UazEgWR6PR4PTp0wgJCbFyVEREVOalpQFdu+pWBDKVbL79NnDqFJPN0kSYuZFjzXBOmDABnT
p1QpUqVZCdnY0NGzZg79692LFjBwCgf//+qFSpEmJjYwEA06dPR6tWrVCzZk1kZGRgzpw5uHTpEl5//XV7vg0iIirtvvoKeOst4PZt47bKlYHVq4HnnrN9XEQOyqESzhs3bqB///64fv06fH190bBhQ+zYsQPP/ftNm5qaCsUDRXHv3LmDIUOGIC0tDeXLl0fTpk1x8ODBEt3vSUREJNutW7rVgDZtMt0+cCCwYAHg52fLqMhGJK0ESSuv3rfc/qWVwz00ZGv2vAmZiMgUPjTkoL7/Hhg6VHcp/WFBQcDKlcCLL9o+rjLCER4aurO7hlkPDZV/5kKZzzMcaoaTiIjI4WRmAqNHA2vWmG7v2RNYtgyoUMG2cZHtCQmQO2MpOMMJMOEkIgtypJk5R4pFLmvH4sznxuYSEnQPBV2+bNzm769LNHv1sn1cZB98St1sDv+UOhERkc3l5gLDhwPR0aaTzc6dgTNnmGyWNXxK3WxMOImIiB504IButaClS43bfHyAVat093OyBB9Zwb59+9ClSxeEhoZCkiRs27ZN36ZWqzF+/HhERETAy8sLoaGh6N+/P65du2Ywxu3bt9G3b1+oVCr4+flh8ODByMnJsfE7McSEk4iICADy8nTrn7dpA1y4YNz+zDPA6dPAa6/pVg+iskcrmbfJkJubi0aNGmGpiT947t69i+PHj2Py5Mk4fvw4tmzZgqSkJLz40MNqffv2xdmzZ7Fz505s374d+/btw9ChQx/rrT8u3sNJRER07BjQvz/wxx/GbZ6ewOzZukLuCs7TlGlCkv8QkMz+nTp1QqdOnUy2+fr6YufOnQb7lixZghYtWiA1NRVVqlTBuXPnEB8fjyNHjqBZs2YAgMWLF+P555/H3LlzERoaKi9+C+F3DhERlV1qNfDBB0CrVqaTzchI3WpBw4cz2SRIWvM2QFda6cGtpKsoPkpmZiYkSYLfv7VfExMT4efnp082ASA6OhoKhQKHDx+2yGuag989RERUNp09q0s0p00DCgsN29zcgFmzgP37gVq17BMfOZ7HuKQeFhYGX19f/XZ/1cTHkZeXh/Hjx6NPnz76Gp9paWkIDAw06Ofi4gJ/f3+kmaohayO8pE5ERGWLRgPMnw9MmgQUFBi3N2kCrFsHNGhg+9jIsZnz1Pm//S9fvmxQ+N3d3f2xQlGr1Xj55ZchhMDy5csfayxbYMJJRKVSma4d+Qhl+twkJwMDBgAHDxo1FUKBucr2mHe2PQqbbQewvdihyvR5LAbrvJqmUqksttLQ/WTz0qVL2L17t8G4wcHBuHHjhkH/wsJC3L59G8HBwRZ5fXPwkjoREZV+Wq2uzFGjRiaTTdSrh+dc38Isl2gUSvKWLqQyxAZPqT/K/WTz/Pnz2LVrFwICAgzaIyMjkZGRgWPHjun37d69G1qtFi1btrRoLHJwhpOIiEq31FRdKaOEBOM2SQLefRf48EOc9Fto89DIydjgKfWcnBwkJyfrv05JScHJkyfh7++PkJAQ/Oc//8Hx48exfft2aDQa/X2Z/v7+cHNzQ926ddGxY0cMGTIEK1asgFqtxvDhw9G7d2+7PaEOMOEkIqLSSgggLg4YNQrIyjJur1FD1966tY0DI6dlg6Utjx49ivbt2+u/HjNmDABgwIAB+OCDD/Ddd98BABo3bmxw3J49e9CuXTsAwPr16zF8+HA8++yzUCgU6NGjBxYtWiQzcMtiwklERKXP9evA0KHA9iLuwxw2TPcUupeXbeMi52aDGc527dpBiKKfTCqu7T5/f39s2LBB1utaGxNOIiIqXTZt0hVpv33buC0sDFi9WrdGOpFMQkgQMu/JFHIT1FKKDw0REVHp8M8/QK9eQO/eppPNQYN0S1My2SSyOc5wElGx5JQ4caTyJo5WmsVZz6PT+P57YMgQID3duC0oCFi5EnhovemH8bxbRqk+jza4pF5acYaTiIicV2ambubyxRdNJ5svvwycOfPIZJOoRLRmbsQZTiIiclK7dunKHV2+bNzm7w8sW6a7xE5kKZzhNBsTTiIici65ucC4cbqE0pQXXtBdQg8JsW1cVPqZU8jdwoXfnRUTTiIich4HDuiWprxwwbjNxwf45BNg4EBdQXciS+MMp9l4DycRETm+vDzdrGabNqaTzWee0T2BPmgQk00iB8QZTiIicmzHjgH9+wN//GHc5ukJzJkDvPUWoOAcClkZL6mbjQknERXLWUucyI3b2mWUvISrw8Qil91KOqnVwEcfATNmABqNUfNhqSreLvwPLr6bDbw727qxEAGA+HeTewwx4SQiIgd05oxuVvPECaOmfCjxkbIDlipbQytxVpNsR2jNWGmIM5wAmHASEZEj0WiAefOAyZOBggKj5pNSKN526YlzimA7BEdlHh8aMhsTTiIicgznz+ueQE9MNG5TKoFJk/BcrCsKJaXtYyMCeA/nY+C1CCIisi+tFliyBGjUyHSyWa8ecPgw8MEHTDaJnBRnOImIyH4uXdKtFrR7t3GbJAFjxwLTpgEeHraPjehhAmZcUrdKJE7HoWY4ly9fjoYNG0KlUkGlUiEyMhI//fRTscds3rwZderUgYeHByIiIvDjjz/aKFoiIjKbEMDq1UBEhOlks0YNYP9+YNYsJpvkOIT0/5fVS7rxHk4ADpZwVq5cGTNnzsSxY8dw9OhRPPPMM+jatSvOnj1rsv/BgwfRp08fDB48GCdOnEC3bt3QrVs3nDlzxsaRExFRiV2/Drz4IjB4MJCdbdw+bBhw6hTw1FO2j42oGEKYtxEgCeHYp8Lf3x9z5szB4MGDjdp69eqF3NxcbN++Xb+vVatWaNy4MVasWFGi8bOysuDr64vMzEyoVCqLxU3kqOTWeHQVJb9nTi0Z10q0JGvWVZR7XuRypJqQdq3zuWkT8PbbwO3bxm1hYbpZz+hoy70elRr2/H19/7VvrWgOlae8uxGz7hUi4M0jZT7PcKgZzgdpNBps3LgRubm5iIyMNNknMTER0Q/9YIqJiUGiqZvOiYjIfv75B+jVC+jd23SyOWiQbmlKJpvkyOReTjfnqfZSyuEeGjp9+jQiIyORl5cHb29vbN26FfXq1TPZNy0tDUFBQQb7goKCkJaWVuT4+fn5yM/P13+dlZVlmcCJiMi0774Dhg4F0tON24KCgM8+A7p0sX1cRGQzDjfDWbt2bZw8eRKHDx/GW2+9hQEDBuAPU+vnmik2Nha+vr76LSwszGJjExHRAzIzdTOXXbuaTjZ79QLOnmWySU5DCMmsjRww4XRzc0PNmjXRtGlTxMbGolGjRvjkk09M9g0ODkb6Qz/E0tPTERxc9AoUEyZMQGZmpn67fPmyReMnIiIAu3YBDRoAcXHGbf7+wMaNui0gwOahEZmNl9TN5nAJ58O0Wq3BJfAHRUZGIiEhwWDfzp07i7znEwDc3d31ZZfub0REZCE5ObqnzJ97Drhyxbj9hRd0s5q9etk+NqLHdX9pS7kbOdY9nBMmTECnTp1QpUoVZGdnY8OGDdi7dy927NgBAOjfvz8qVaqE2NhYAMDIkSPRtm1bzJs3D507d
8bGjRtx9OhRrFy50p5vg4iobPr1V2DgQODCBeM2lQr45BPd0pUSfwGTczLnEjkvqes4VMJ548YN9O/fH9evX4evry8aNmyIHTt24LnnngMApKamQqH4/0nZqKgobNiwAZMmTcLEiRNRq1YtbNu2DQ0aNLDXWyAqdeSUOpJTQknu2NZm7bJFckoROVIJpRLJywMmTwbmzTNddPDZZ3XljqpUsX1sRJak/XeTeww5VsK5atWqYtv37t1rtK9nz57o2bOnlSIiIqJiHT0K9O8PnDtn3FauHDB7NvDWW4DC4e/gIiIrcqiEk4iInERBAfDRR7pNY2KmOipK98BQrVo2D43Iasy5J5OX1AEw4SQiIrnOnNHNap44Ydzm5gbMmAGMGQMo5d1iQeTohFaCkPnUudz+pRUTTiIiKhmNBpg7F5gyRTfD+bAnnwTWrQPq17d9bES2wBlOszHhJCKiRzt/XveEuamlg11cgEmTgIkTAVdX28dGZCN8St18TDiJiKhIktACixcD48cD9+4Zd6hfH1i7Fmja1PbBEdmaMKOQOxNOAEw4iWzCkUriyB1fTuxyyxxZMxa5Y1d2nyer/5X8d2X1d0Zh4g4Wq78B3jFRV1OSgLFjgWnTAA8P2wdHRE6FCScRERkSAn21x/Bx4XaoYGKlt5o1dbOaUVG2j43InngPp9lYGI2IiPSCRBY2Fq7FksJvTCebw4cDJ08y2aQySQjzNjn27duHLl26IDQ0FJIkYdu2bQ/FIDBlyhSEhITA09MT0dHROH/+vEGf27dvo2/fvlCpVPDz88PgwYORk5PzmO/+8TDhJCIiQAh015xCYsFCxGiTjNvDwoBdu3T3c3p52T4+IkeglczbZMjNzUWjRo2wdOlSk+2zZ8/GokWLsGLFChw+fBheXl6IiYlBXl6evk/fvn1x9uxZ7Ny5E9u3b8e+ffswdOjQx3rrj4uX1ImIyjh/kYu5hd/iJe1p0x1eew2YPx/w9bVtYEQOxhZPqXfq1AmdOnUqYiyBhQsXYtKkSejatSsAYN26dQgKCsK2bdvQu3dvnDt3DvHx8Thy5AiaNWsGAFi8eDGef/55zJ07F6GhobLisRTOcBIRlWGdNH8gsWChyWQzDT7o7dIfWLWKySYR8P/3cMrdAGRlZRls+fkmbll5hJSUFKSlpSE6Olq/z9fXFy1btkTivyXLEhMT4efnp082ASA6OhoKhQKHDx9+zBNgPiacRERlkErcw1L1Zmwo/B8CYXxv1zeKhohyG4kdyrp2iI6o9AkLC4Ovr69+i42NlT1GWloaACAoKMhgf1BQkL4tLS0NgYGBBu0uLi7w9/fX97EHXlInIipj2mnPY4n6G1RCplHbLZTDf126YpuyoR0iI3Jsj7O05eXLl6FSqfT73d3dLRqbo2PCSWQGOfUgAcBXW/IfLHLHlltvMtB9jqz+kPGz1do1ROWQ+z5v5I+1UiQ6rsIB1hXPyQHGjcNW9WrT7V26IGDlSqwJDsYa20ZG5BwEzCiLpPt/KpXKIOE0R3BwMAAgPT0dISEh+v3p6elo3Lixvs+NGzcMjissLMTt27f1x9sDL6kTEZUF+/cDjRoBy5cbt6lUwJo1wLffAnb8hUTk6O4/NCR3s5Tw8HAEBwcjISFBvy8rKwuHDx9GZGQkACAyMhIZGRk4duyYvs/u3buh1WrRsmVLi8UiF2c4iYhKs7w83Trn8+ebLggYHa17KKhKFdvHRuRszChzJLd/Tk4OkpOT9V+npKTg5MmT8Pf3R5UqVTBq1CjMmDEDtWrVQnh4OCZPnozQ0FB069YNAFC3bl107NgRQ4YMwYoVK6BWqzF8+HD07t3bbk+oA0w4iYhKr6NHgf79gXPnjNvKlQPmzAHefBNQ8GIXUUmYU8hdbv+jR4+iffv2+q/HjBkDABgwYADi4uIwbtw45ObmYujQocjIyEDr1q0RHx8PjweWmF2/fj2GDx+OZ599FgqFAj169MCiRYvkBWJhTDiJiEqbggJgxgzg448BjYn17Z96CoiL0y1RSUQOpV27dhDFZKmSJGH69OmYPn16kX38/f2xYcMGa4RnNiacRESlyenTwIABwIkTxm1ubsBHHwGjRwNKB3iIicjJ2KLwe2nFhJOIqDTQaIC5c4EpU3QznA9r2hRYuxaoX9/2sRGVFja4h7O0YsJJZAa5JW4yFfJXlCgp2WWOZLJmSScv4SovGBk/t9WSiUvJdiSn7JLc81hDexPLCjejhbhs1KaGAnOUz2DB6XYobPo9gO8dqnwVkTPhDKf5mHASETkpSWjxuvYQPiiMRzmojdrPSUF406UnfldUskN0RKWRJL8Op5y/lEsxJpxERE4oTNzBEvXXeFpcNGrTQMJi5dOIVUajQOKPeSJL4Qyn+fiTiIjImQiBftqj+KjwB6hgfKvGBSkAb7v0xG+KqnYIjojINCacREROIlhk4ZPCLeigTTLZ/qkyEtOVHXFXcrNxZERlBB8aMhsTTiIiRycEemhPYU7hdyiPe0bNl+GHYa7/wX5FDTsER1R22KLwuyPKysrC7t27Ubt2bdStW9esMZhwEhE5sACRg7mF36Kb9ozJ9nWKZpjk0hnZkofJdiKynLJyD+fLL7+Mp59+GsOHD8e9e/fQrFkz/P333xBCYOPGjejRo4fsMZlwEkF+GZo7+fLKylizdFE5Ie/buFDSyup/Vyoscd8AraessW8pjGfriiOnHJWcMkSAGZ8BG5QW6qT5AwsLtyIQOUZtafDBKJeXsENZl2WOiGxFmPGUuhMmnPv27cP7778PANi6dSuEEMjIyMDatWsxY8YMsxJOLqBLRORgVOIelqm/wobC/5lMNr9RNESU20jsUJp3aYuIzKSVIGRuzngPZ2ZmJvz9/QEA8fHx6NGjB8qVK4fOnTvj/PnzZo3pUAlnbGwsmjdvDh8fHwQGBqJbt25ISjJ9c/x9cXFxkCTJYHtwAXsiIqfy8884WLAQfbTGS1PeQjkMdHkFr7v2wR3Jyw7BEVFZEBYWhsTEROTm5iI+Ph4dOnQAANy5c8fsHMuhEs5ffvkFw4YNw6FDh7Bz506o1Wp06NABubm5xR6nUqlw/fp1/Xbp0iUbRUxEZCE5OcBbbwExMaiELKPmHxV1EeU2Ct8qI+wQHBEB/38Pp9zN2YwaNQp9+/ZF5cqVERoainbt2gHQXWqPiDDvZ5BD3cMZHx9v8HVcXBwCAwNx7NgxPP3000UeJ0kSgoODrR0eEZF17N8PDBwIXDQu4p4Fd4x36YKNiicByfl+cRGVKmXkHs63334bLVq0wOXLl/Hcc89BodDNT1avXh0zZswwa0yHSjgflpmZCQD6+wiKkpOTg6pVq0Kr1eLJJ5/Exx9/jPr169siRCIi8927B0yeDMyfb7J2yh6pJka49sBVyc/2sRGRkbLylDoANGvWDM2aNTPY17lzZ7PHc9iEU6vVYtSoUXjqqafQoEGDIvvVrl0bq1evRsOGDZGZmYm5c+ciKioKZ8+eReXKlY365+fnIz///1fnyMoyvnRFRGR1R44AAwYA584ZNeXCFVNc
nsdqRUvOahI5EKHVbXKPcQZjxowpcd/58+fLHt9hE85hw4bhzJkz+PXXX4vtFxkZicjISP3XUVFRqFu3Lj799FN8+OGHRv1jY2Mxbdo0i8dLRFQiBQXAjBnAxx8DGo1xe+vWaPNbS6RIFWwfGxEVrxRfUj9xwvhBRVMkM/8IdsiEc/jw4di+fTv27dtncpayOK6urmjSpAmSk5NNtk+YMMEgi8/KykJYWNhjxUuOx9o1FSu7z5PVX05NSLmxF8p89s9FyHxW0Io/K+XU1ZRL7nm0Zix6p08D/fsDJ08at7m7Ax99BIwahRQveXVbHbGGKBE5lz179lh1fIdKOIUQGDFiBLZu3Yq9e/ciPDxc9hgajQanT5/G888/b7Ld3d0d7u7ujxsqEVHJFRYCc+cCU6YAarVxe9OmwLp1QL16to+NiEqsLN3DaWkOlXAOGzYMGzZswLfffgsfHx+kpaUBAHx9feHpqVvBpH///qhUqRJiY2MBANOnT0erVq1Qs2ZNZGRkYM6cObh06RJef/11u70PIiK9v/7S3at56JBxm4uLLgl97z3A1dX2sRGRLKU54ezevXuJ+27ZskX2+A6VcC5fvhwA9PWe7luzZg0GDhwIAEhNTdU/ng/oipAOGTIEaWlpKF++PJo2bYqDBw+iHmcKiMietFpgyRJdMnnPxBKeDRroZjWbNLF9bERknlJ8D6evr69Vx3eohFOYKAvysL179xp8vWDBAixYsMBKERERmeHvv4FBg4CHfl4BABQKYNw44IMPdPdtEpHTEAK65SplHuMM1qxZY9XxHWqlISIipyYE8PnnQESE6WSzVi3g11+B2Fgmm0ROqKysNAQAhYWF2LVrFz799FNkZ2cDAK5du4acnByzxnOoGU4iIqd17RowZAjw44+m20eM0CWaXlwDnYgc26VLl9CxY0ekpqYiPz8fzz33HHx8fDBr1izk5+djxYoVssdkwkmlktyyL3LLynhB3gMecsb3EvLGLoC8qsIuMi9seAs3Wf3lKCfk/QgqlEr+XtUwUeOyuP6SvP76f1Mh8B/tKcwu/A7lYXyv5mX4YZjrf/DdokWyxpfD2mWO5Hx+WXKJSjXx7yb3GCczcuRINGvWDKdOnUJAQIB+/0svvYQhQ4aYNSYTTiIiMwWIHMwr/BZdtWdMtq9TNMMkl87IljxsHBkRWUNpfkr9Qfv378fBgwfh5mY44VCtWjVcvXrVrDGZcBIRmeF5zVksKNyGQBjfz5QGH4x06Y6flXXsEBkRWUtZSTi1Wi00JlZCu3LlCnx8fMwakw8NERHJoBL3sFz9FdYXfmEy2fxa0QhRbiOZbBKVQkIrmbU5mw4dOmDhwoX6ryVJQk5ODqZOnVrkwjqPwhlOIqISaq/9C4vV36ASsozabqEc3nXphm+VEXaIjIjIcubNm4eYmBjUq1cPeXl5eOWVV3D+/HlUqFABX375pVljMuEkInoEL5GP6YU/4TXtYZPtPyrqYpTLS7gpmXepiYicRCku/P6gypUr49SpU9i0aRNOnTqFnJwcDB48GH379tWv/CgXE04iomJEaVOwVL0Z1XDHqC0L7hjn8iI2KZoAkvP9UiEiecrKPZwA4OLigr59+6Jv376WGc8ioxCZQW4pIkcqtyK3FJEcLkLerdUeMr+NC60Yex4KrTY2AFzJf7fEfeV+vh7mIdR4X/Mz3tYcgMJUXZPnnoNq1SqsCAuD/Ip0xXOkzzrgePEQ2UtZSThjY2MRFBSE1157zWD/6tWrcfPmTYwfP172mHxoiIjoIU20l7FXvRjDNb8aJZu5cMW7Ll2BHTuAsDA7RUhE9iCEeZuz+fTTT1GnjvGDj/Xr1zer6DvAhJOISM9VFGJi4c/4Wb0CtcVNo/ZEqRpau43EamUrXkInKoNssbSlRqPB5MmTER4eDk9PT9SoUQMffvghxAOZqxACU6ZMQUhICDw9PREdHY3z589b7H2mpaUhJCTEaH/FihVx/fp1s8ZkwklEBKC+9joS1MswVrMHLg/ddpAHF7yvfB4vuA7B31JAESMQET2+WbNmYfny5ViyZAnOnTuHWbNmYfbs2Vi8eLG+z+zZs7Fo0SKsWLEChw8fhpeXF2JiYpCXl2eRGMLCwnDgwAGj/QcOHEBoaKhZY/IeTiIq05RCg3c0+/GeZhfcTCyHeVyqhLdcXsZfikA7REdEDkUr6Ta5x8hw8OBBdO3aFZ07dwagW93nyy+/xG+//QZAN7u5cOFCTJo0CV27dgUArFu3DkFBQdi2bRt69+4tLz4ThgwZglGjRkGtVuOZZ54BACQkJGDcuHF4992S30v/ICacRFRm1dTexLLCzWguLhu1qaHAbOUzWKhsh0JJaYfoiMjR2OKhoaioKKxcuRJ//fUXnnjiCZw6dQq//vor5s+fDwBISUlBWloaoqOj9cf4+vqiZcuWSExMtEjCOXbsWNy6dQtvv/02CgoKAAAeHh4YP348JkyYYNaYTDiJqMyRhBZDNYmYotmBclAbtf8hBeFNl5dxWmHepSMiKp0eJ+HMyjJcMMLd3R3u7u5G/d977z1kZWWhTp06UCqV0Gg0+Oijj/TlidLS0gAAQUFBBscFBQXp2x6XJEmYNWsWJk+ejHPnzsHT0xO1atUyGW9JMeEkojIlTNzBUvXXaCMuGrVpIGGR8mnMVEajQOKPRyIy9DgJZ9hDVS2mTp2KDz74wKj/V199hfXr12PDhg2oX78+Tp48iVGjRiE0NBQDBgwwO3ZzeHt7o3nz5hYZiz9RyW6sWdvP2jU+a7p9Iqu/nPqULjKf5cuT5NW+9BZusvrflkp+E3o5Ie9HSqYiX1Z/Of+uruKhy+BCoK/2N3yo+R7eKDDqfxEV8V9lHxxXVIOnADwtXMok0H1OifveyB8ra2xnrmlL5FzkJ5yArv/ly5ehUqn0e4uaLRw7dizee+89/aXxiIgIXLp0CbGxsRgwYACCg4MBAOnp6QZPkqenp6Nx48YyY7MdPqVORKVekMjEhsLVWKD5xmSyuVLxFJ53eRfHFdVsHxwRlQkqlcpgKyrhvHv3LhQKw/RMqVRCq9VVzwgPD0dwcDASEhL07VlZWTh8+DAiIyOt9wYeE2c4iaj0EgLdtScRq9mG8rhn1JyK8hjp0hMHFDVlz84SURlkg7XUu3Tpgo8++ghVqlRB/fr1ceLECcyfP1+/6o8kSRg1ahRmzJiBWrVqITw8HJMnT0ZoaCi6desmLzYb4k9YIiqVAkQOZhduRRdx2mT7/xQtMFX5AnIkDxtHRkTOSmh1m9xj5Fi8eDEmT56Mt99+Gzdu3EBoaCjeeOMNTJkyRd9n3LhxyM3NxdChQ5GRkYHWrVsjPj4eHh7m/zx78sknkZCQgPLly2P69On473//i3Llypk93sOYcBJRqfO85iwWFm5FReQataXBB6Nd/oMERV07REZEzswWZZF8fHywcOFCLFy4sMg+kiRh+vTpmD59uqyxi3Pu3Dnk5uaifPnymDZtGt58800mnEREpviKe5hZ+D16a0+YbP9G0RgTlN2QIVnuhygRlR22SDjtpXH
jxhg0aBBat24NIQTmzp0Lb29vk30fnG0tKSacRFQqPKP9C4vU36ASsozabqEcxrp0x3ZFQztERkSlRWlOOOPi4jB16lRs374dkiThp59+gouLcZooSRITTird5JSVuZMvr+yLnLEB3K9yUWIqmaWI5I0trxDvXROFzovjL0p+T5CbkFf4wkX7+CWgvEQeJmm241XtIZPH7JUaIVbRF7ehQtVi7qVKVt6RFYvcz4ycUkdyyxwRET2u2rVrY+PGjQAAhUKBhIQEBAZabklfJpxE5LRaai9gfuEmVMVto7ZseGKuohd+lFoCknPMMBCRYxNC/oylsHBNX1u4X4LJkphwEpHT8RBqjNP8hNe1+6GA8U/zX6QnMEcxEOmSvx2iI6JSywZlkRzFhQsXsHDhQpw7dw4AUK9ePYwcORI1atQwazwWficip9JYm4p49XwM1e4zSjZz4Ybxyh7o6zKEySYRWdz9ezjlbs5mx44dqFevHn777Tc0bNgQDRs2xOHDh1G/fn3s3LnTrDE5w0lETsFVFGKE5icM0+6BC4wv9xySwjHGpTdSpQDdDie8jEVEjq00PzT0oPfeew+jR4/GzJkzjfaPHz8ezz33nOwxmXASkcOrLa5hTuEXqIdrRm15cMEsZSesUrSBVuJFGyKyHlsUfncE586dw1dffWW0/7XXXiu2PmhxHOqnc2xsLJo3bw4fHx8EBgaiW7duSEpKeuRxmzdvRp06deDh4YGIiAj8+OOPNoiWiKxNKTR4U7MTWwvnmUw2T0qV0dF1ND5TtmWySURkIRUrVsTJkyeN9p88edLsJ9cdaobzl19+wbBhw9C8eXMUFhZi4sSJ6NChA/744w94eXmZPObgwYPo06cPYmNj8cILL2DDhg3o1q0bjh8/jgYNGtj4HZA1qSVNifvKLVnjZuW/vVxkjC+3tFCWVCCrv4fMb3uFjBpQ7jLHvgvjMkf3hYt0zNFsQGNxyahNDQWWKTrhc8Vz0EAJlYnL55cUxvU4ixOiNV3guCj/SMZrsxdHTqmjO3nyynoRkW2UlUvqQ4YMwdChQ3Hx4kVERUUBAA4cOIBZs2ZhzJgxZo0pCeG4D+zfvHkTgYGB+OWXX/D000+b7NOrVy/k5uZi+/bt+n2tWrVC48aNsWLFike+RlZWFnx9fZGZmQmVSmWx2Mny5PzCdhVKWWNbO+H0llGH05kTTi/hKmvsO1Ke0T5JaNFfux9jtdvhYaJmaBJCMcHlVfwpVS52bLkJYQXhKau/3PHl/MHEhJPImD1/X99/7V87Doe3q7zaxznqfLSOX+JUeYYQAgsXLsS8efNw7Zru6lJoaCjGjh2Ld955B5IZpeYcaobzYZmZmQAAf/+inzZNTEw0yrZjYmKwbds2k/3z8/ORn5+v/zorS94sCBFZT2VxC7M0G9BSXDBq00DCKkU0lio6QS3JS2yJiCyhrMxwSpKE0aNHY/To0cjOzgagW+P9cZR4KiUyMhJnzpx5rBeTQ6vVYtSoUXjqqaeKvTSelpaGoKAgg31BQUFIS0sz2T82Nha+vr76LSwszKJxE5EZhEAvbSK2F842mWxeREX0Uo7EQuWLTDaJyG7KSlmkB/n4+Dx2sgnISDj//vtvNG3aFBMnTkRenvFlMEsbNmwYzpw5o19myVImTJiAzMxM/Xb58mWLjk9E8gSJDKzSfIqPNJvgjXyj9jWKp/Giy1icVFSzfXBERA8oiwmnpZQ44UxKSsLrr7+O2bNnIyIiArt27bJaUMOHD8f27duxZ88eVK5c/H1awcHBSE9PN9iXnp6O4OBgk/3d3d2hUqkMNiKyAyHwovYofiychbbiT6PmKyiPfsph+EjZHXmS9daiJyIi6ytxwqlSqbB06VIkJiZCpVIhJiYGr776Km7evGmxYIQQGD58OLZu3Yrdu3cjPDz8kcdERkYiISHBYN/OnTsRGRlpsbiIyLL8RDZmaVdivuYL+ML44ZuNUiRecBmPQ4padoiOiMg0znCaT/ZDQ82bN8eRI0ewePFiTJ48Gdu3bzd5H6QkSTh16pSssYcNG4YNGzbg22+/hY+Pj/4+TF9fX3h66p4g7d+/PypVqoTY2FgAwMiRI9G2bVvMmzcPnTt3xsaNG3H06FGsXLlS7lsjIhtopz2BCdoN8Ee2UVs6VJio7I1fFPXsEBkR0SMICdCW7rXU1Wo1OnbsiBUrVqBWLcv90W/WU+qFhYW4efMm8vPzERAQgICAAIsEs3z5cgBAu3btDPavWbMGAwcOBACkpqZCofj/idmoqChs2LABkyZNwsSJE1GrVi1s27aNNTjLOH/hIau/nNI/AOAi8weInPqUN6W7ssYulOQtY1FOa73iFLmScRmj+1TiLiZqvsaL4ojJ9t1ohWVSX+QIb9QsooJQqjKnxLFUlllX865UdE1QU2SX0rJiATo5JcMAll0iMldZeErd1dUVv//+u8XHlf2bZ9euXXj77bdx8eJFvP322/joo48s8vQSoLuk/ih79+412tezZ0/07NnTIjEQkeW11v6BDzUbEIRMo7YMeGOJ1B+/Ss3tEBkRUcmVhYQTAPr164dVq1YZraX+OEqccN68eROjR4/Gl19+iYiICBw8eBAtWrSwWCBEVPqUE3kYp92Gl7UHTLYfwJNYLPVHhuRr48iIiOQTQrfJPcbZFBYWYvXq1di1axeaNm1qtNrj/PnzZY9Z4oSzdu3aKCgowMyZMzFmzBgolfJWciGisqWZ9jw+1nyByrht1JYFT3yk/A9OadsDZqxYQURE1nPmzBk8+eSTAIC//vrLoM2cVYYAGQlnq1atsGzZMlSrVs2sFyKissFdFGCU9nu8qv0FChM3Lv4q1cFk5StIl8qjihNeaiKiMsycp86d8Ofcnj17LD5miRPOH3/80eIvTkSlS4T2b8Rq/ofquGHUdhdumK14CV8pnuKsJhE5pbJyD+d9ycnJuHDhAp5++ml4enpCCGH9GU4ioqK4iEIM1f6IQWIHlCZmNY9KNTBR2Q9XpAp2iI6IyDLKSsJ569YtvPzyy9izZw8kScL58+dRvXp1DB48GOXLl8e8efNkj8mEkyxGbmkWVyHvPmAvWG8NbblljgpkliIqQEGJ+1YU5WSN7aeVtwpPIeTFrhLFn/dqIhWjxEqEw3iZ2Hy44FPpJWyWnoUQCvg/lIteU+TKisXrEbE8yNrlpQpknke1VEStJwtwpDJH1i7RFOg+p8R9b+SPlTW2XCxHVfaUlYRz9OjRcHV1RWpqKurWravf36tXL4wZM4YJJxHZjkJo0B0/orfYClcYJ1N/oBo+UgzCJSnEDtEREVme0EoQMgu/y+3vCH7++Wfs2LHDaHnxWrVq4dKlS2aNyYSTiGSrJK5hlPgMT+CiUZsaSqyRXsB6qSM0EqtZEBE5m9zcXJQrZ3y17fbt23B3dzdrTJlLZRBRWSYJLbqIHVggpphMNpNRCUMUE7FO0ZnJJhGVOro6nHLXUrd31PK1adMG69at038tSRK0Wi1mz56N9u3bmzUmZziJqEQCxU28Iz5HBP40atNAwha8gGWKjlBL1r
vXlojInsrKPZyzZ8/Gs88+i6NHj6KgoADjxo3D2bNncfv2bRw4YHohj0dhwklExRMCHbAXr4mN8ESeUfNVBGOhNBR/STWglvLtECARkW2UlYSzQYMG+Ouvv7BkyRL4+PggJycH3bt3x7BhwxASYt59+Uw4iahIFcQdvCfWoSlOm2z/Dh3wP+k/KJDMu6eHiMiZlJWEEwB8fX3x/vvvW2w8JpxULDklSCDze0pumRh/rUeJ+/5ZMELW2PXclsjqX07I+9bJk/FeXWSeyAyp5CWXSjy+EIgRhzFGuwkqGJcXSkMA5igG43epzr97dO9PI8m7WSlLZuyQUV7KReYt6ndRKKu/3M9vWSmJY+33ae1SR3KUlX9T+n9lKeG8c+cOVq1ahXPnzgEA6tWrh0GDBsHf39+s8fjQEBEZKC+y8LH2U3ygXWMy2fxBehpDFdMfSDaJiKg02bdvH6pVq4ZFixbhzp07uHPnDhYtWoTw8HDs27fPrDGZcBKRXlvtcXyhmY724oRR2y34YqJiFBYqBuKe5GmH6IiI7Ev+E+pmrL0O4OrVq+jXrx8CAgLg6emJiIgIHD169IE4BKZMmYKQkBB4enoiOjoa58+ft9j7HDZsGHr16oWUlBRs2bIFW7ZswcWLF9G7d28MGzbMrDGZcBIRfEQupmpWYab2U/gj26h9l9QKQxQf4ojU0A7RERE5BlsknHfu3MFTTz0FV1dX/PTTT/jjjz8wb948lC9fXt9n9uzZWLRoEVasWIHDhw/Dy8sLMTExyMszfrDTHMnJyXj33XehVP5/eTulUokxY8YgOTnZrDF5DydRGddKewYTtf9DRWQYtd2BNxYp+uNXqZntAyMicjBCmLHSkMyEc9asWQgLC8OaNWv0+8LDwx8YT2DhwoWYNGkSunbtCgBYt24dgoKCsG3bNvTu3VvW65ny5JNP4ty5c6hdu7bB/nPnzqFRo0ZmjcmEk6iMKifyMEL7NbqJ/Sbbf5EaY5aiL9QIsHFkRESOyRYPDX333XeIiYlBz5498csvv6BSpUp4++23MWTIEABASkoK0tLSEB0drT/G19cXLVu2RGJiotkJ5++//67/73feeQcjR45EcnIyWrVqBQA4dOgQli5dipkzZ5o1PhNOojKoifgLkzRxCMUto7ZseGKeojd2SC0BSYK3E66SQURkDbqVhuQfAwBZWVkG+93d3U0uE3nx4kUsX74cY8aMwcSJE3HkyBG88847cHNzw4ABA5CWlgYACAoKMjguKChI32aOxo0bQ5IkiAfe4Lhx44z6vfLKK+jVq5fs8ZlwljHlPeT9ZXInv+RlP+SO7SXkrUiTJ6NsTUPXpbLG1sos5yOnzJFcoRrj9WuLc01p/CR5UdxFAT7U/Ig2+MlkexIa4WsMxV1tANr8u+86Sv5ebyvk3RZeqJB33m9KJX+vyQUjZY0t9/Mrl5zxrV1uR/bPAZb/IXpsYWFhBl9PnToVH3zwgVE/rVaLZs2a4eOPPwYANGnSBGfOnMGKFSswYMAAq8WXkpJitbEBJpxEZUY9kYIpmjWoinSjtny4YztexWE8C9kFVYmIygitkKCVeYn8fv/Lly9DpVLp95ua3QSAkJAQ1KtXz2Bf3bp18c033wAAgoODAQDp6ekGq/6kp6ejcePGsmJ7UNWqVc0+tiSYcBKVcq5Cjde0P+BVEQ8ljGcUL6IuvsKbuI0gE0cTEdF9j3MPp0qlMkg4i/LUU08hKSnJYN9ff/2lTwjDw8MRHByMhIQEfYKZlZWFw4cP46233pIVW3GuXbuGX3/9FTdu3IBWqzVoe+edd2SPx4STqBSrKS5jiiYOtXDFqE0NV/yEPjiAjhCskEZE9Gjm1NWU2X/06NGIiorCxx9/jJdffhm//fYbVq5ciZUrVwIAJEnCqFGjMGPGDNSqVQvh4eGYPHkyQkND0a1bN3mxFSEuLg5vvPEG3NzcEBAQAEn6//cgSRITTiLSUQoN+okdGKzdDlcT92CmogY24W3cRCU7REdE5Jxs8ZR68+bNsXXrVkyYMAHTp09HeHg4Fi5ciL59++r7jBs3Drm5uRg6dCgyMjLQunVrxMfHw8Oj5EtAF2fy5MmYMmUKJkyYAIXMe/OLwoSTqJSpKtIwRbMG9fC3UZsaSuzCf/ALXoQWSuODiYioSLZaS/2FF17ACy+8UGS7JEmYPn06pk+fLnvskrh79y569+5tsWQT4EpDRKWGJLTord2FtZoZJpPN86iM15QTsAcvMdkkIqIiDR48GJs3b7bomJzhJCoFQsQ/mKyJQxMYr6WrgYT/SR2xWtEZaskVISaOJyKiRxNaCUKSOcMpc2UiRxAbG4sXXngB8fHxiIiIgKurYRnD+fPnyx6TCScVS069Pl+t6RIPRfGQ+fFTyCjX4wV5NT5dhLy6mmFaL1n95fATcmYfBXoVHkFX/A/uyDdqvYEQrMfbSBW10PTft3hWIW+tXV9R8n+nbEkta2w5dTUBoFDSPrqTjThSbUrW1SSyDVtdUre32NhY7NixQ7+05cMPDZmDCSeRk/LDbQzESkTgd5Ptv6ATfkBvqCHvDwEiIjKtrCSc8+bNw+rVqzFw4ECLjelQ93Du27cPXbp0QWhoKCRJwrZt24rtv3fvXkiSZLQ9ztJORI5PoBV+xXSMM5ls3kJFLMFkbMMAJptERBZ0P+GUuzkbd3d3PPXUUxYd06ESztzcXDRq1AhLl8pbljApKQnXr1/Xb4GBgVaKkMi+fJCJt7EQQ7AMXjC+HH0Qz2IOZuMC6tshOiKi0k0r/n+1oZJv9o5avpEjR2Lx4sUWHdOhLql36tQJnTp1kn1cYGAg/Pz8LB8QkQN5Er+hP1bBB9lGbRkoj014A3+ise0DIyKiUuW3337D7t27sX37dtSvX9/ooaEtW7bIHtOhEk5zNW7cGPn5+WjQoAE++OADi08DE9lTOeTgFaxFJA6YbD+K1tiCgbgHbxtHRkRUtpSVezj9/PzQvXt3i47p1AlnSEgIVqxYgWbNmiE/Px+ff/452rVrh8OHD+PJJ580eUx+fj7y8///ad6srCxbhUskWwOcxEB8hvK4Y9SWBRX+h9eQjFZ2iIyIqOwpKwnnmjVrLD6mUyectWvX1j+yDwBRUVG4cOECFixYgP/9738mj4mNjcW0adNsFaLTc5VRoidTYVyWpzguWnm3EIfKKEWkhbybZgIgbzkwFyEv9ooySguFugBu4h6e036BpmK3yT7npOb4QTEYdyVf5BTKe6/BWjdZ/ZOUOSXum6aQV+bo74LRsvrLIbdUkLXJiUdu2SKWOSKyDSEAIbM6m3DCezitwakTTlNatGiBX3/9tcj2CRMmYMyYMfqvs7KyEBYWZovQiEqkqvYPvKhdgfK4adR2D174STEQZ6SnADNroRERkXnKygxneHh4sfU2L168KHvMUpdwnjx5EiEhRa+l4u7uDnd3loohx+OKfHTDRjyrjTfZniw1wveKociW/G0cGRERAf//hLrcY5zNqFGjDL5Wq9U4ceIE4uPjMXbsWLPGdKiEMycnB8nJyfqvU1JScPLkS
fj7+6NKlSqYMGECrl69inXr1gEAFi5ciPDwcNSvXx95eXn4/PPPsXv3bvz888/2egtEZgnHeQzAcgTjulFbPjzws6IfTkjPcFaTiIisbuTIkSb3L126FEePHjVrTIdKOI8ePYr27dvrv75/6XvAgAGIi4vD9evXkZqaqm8vKCjAu+++i6tXr6JcuXJo2LAhdu3aZTAGkSNzgRqd8Q1i8B0UJu47/Rt18Z3yTWRIrC1LRGRvZeWSelE6deqECRMmmPVQkUMlnO3atYMo5u7auLg4g6/HjRuHcePGWTkqIuuohEsYhGWojFSjNjVcsVvRG4eljoDkUOszEBGVWWU94fz666/h72/ebV0OlXASlQUKaBCD79AZ38AFGqP2q6iBbcq3cEuqZIfoiIioKGUl4WzSpInBQ0NCCKSlpeHmzZtYtmyZWWMy4Sxj5JZPqew+r8R9VTLL7bih5CWXAKCituSli/Ik40SuOKFaeQ+StauSKav/jdu6ouzltdfQMW8FgrXGT/hpoESiW3d8V9ANWk3Jz02eiaS1OJeU8koXySl1lCUVyBo70H2OrP438s27Wb0k5H5vyI39Tn7Jx5db0ollkYhsQ5jx0JAzJpzdunUz+FqhUKBixYpo164d6tSpY9aYTDiJbEFo0US9A60LvoIL1EbNNxVVEO/+Bv5RVoW2QF4iTkREtiGE/LqazliHc+rUqRYfkwknkZV5F95E23vrUVn7p1GbFhKOuL6IQ24vQSvx25GIiEon/oYjshYhUDNnP57M+AauwngVpttSCHZ4vIE0ZU07BEdERHIJrQQBmZfUtc5zSV2hUBRb8B0AJElCYWGh7LGZcBJZgWfhHbS6/T+E5v1h1CYg4bhrDA64vQyNJO++VyIisp/S/tDQ1q1bi2xLTEzEokWLoNXKXNvzX0w4iSxJCITfPYxmtzfBTdwzas6UKmKHx1BcVda1Q3BERPQ4SvtKQ127djXal5SUhPfeew/ff/89+vbti+nTp5s1NhNOIgtx12Sh5e31CLt3ymT77y7PYJ97H6glTxtHRkREllBWHhoCgGvXrmHq1KlYu3YtYmJicPLkSTRo0MDs8ZhwEllA2N3jaHF7Azy0OUZtd5W+iHcdiksujewQGRERWUppv6QOAJmZmfj444+xePFiNG7cGAkJCWjTps1jj8uE08nJrdfnKuSV3CknSv4RUQl59yMWQt59IHI+rM0keXU13d3k/QnqVS4PAOCqyUXD9C0Iyzpmst9lVTP8HtQdN26HwhMlu8n6Xo68fyONJC/2i0p5NUTlUMusf+rM9SPlvlc5nPm8EJHzmj17NmbNmoXg4GB8+eWXJi+xm4sJJ5GZgnL+QOO0jfAszDJqy1d642Twy7ju09AOkRERkTWU9ns433vvPXh6eqJmzZpYu3Yt1q5da7Lfli1bZI/NhJNIJjdxF42vb0S1zEMm2695N8TJ4JdR4OJt48iIiMiaSvs9nP37939kWSRzMeEkkqGq9iw6F34Kv8x/jNoKFJ74PagHrqiaAlb6hiUiIvsp7fdwxsXFWW1sJpxEJeAi8tFesxHNtTtMtqd71cGJ4N7Ic/WzbWBERGQzpf2SujUx4SR6hFDteXQpXIEAXDdqUyvccSawGy75tuKsJhFRKScEIGTWPXemS+rWxISTqAhKoUZrzTeI1H4PBYx/YvzjWQPHQ17BXbcAO0RHRETkPJhwOhi5ZY7kcoNCVv8Q4VXivkqZlw3klvNRySjR1KjGTVljZ+UYFmP3V6ei9Z3V8NdeNepbCFcc9emBP8o9C9xVAHcfPf7t7JKXjDqlzC5xXwBIU5QggAfkSmpZ/eVwpHI+ckuAyeVI75WIbEMIM9ZS5yV1AEw4iQxIQoOInJ/QOHs7FDCus3jTNRz7y7+GDEWoHaIjIiJ70goJWpkJJ+/h1GHCSfQvX/V1tM5YjYrqv43aNFDipM+LOOMdAyEpIbNmPRERlQYCJm6wevQxBJnXV4lKI6FFvZyf8eLN6SaTzdsulbG94iSc9nlel2wSEVGZpBX//6R6ybfHe82ZM2dCkiSMGjVKvy8vLw/Dhg1DQEAAvL290aNHD6Snpz/eC1kZZzipTCtX8A+apa9HhXsXjNq0UOC0dyec8nkBWonfKkREZZ0wY4bzcZ5SP3LkCD799FM0bGi4at3o0aPxww8/YPPmzfD19cXw4cPRvXt3HDhwwPwXszL+FqWySQiEZx5AxM1tcBEFRs0ZLsH41e81/OMWbofgiIiorMvJyUHfvn3x2WefYcaMGfr9mZmZWLVqFTZs2IBnnnkGALBmzRrUrVsXhw4dQqtWrewVcrF4SZ3KHE/1HTx1dTma3PjKKNkUkHDWKxrfV5zMZJOIiAzcX2lI7gYAWVlZBlt+fn6xrzVs2DB07twZ0dHRBvuPHTsGtVptsL9OnTqoUqUKEhMTLf+mLYQznA5GbqkVuWWUvEXJy/MAwB2p+G+IBwUKz0d3ekAljbz+T1cuebkglW+O8U4hEHLrBJ5I3Q5XTZ5R8z338kiq1Q2ZvtVQBcZLVz7o3J9VSxwLAJxSGz/xXpQ0F3lljm4p7snqL5c1y/8Eus+R1V8tlfw83sln2SIisiytkP/M6P17OMPCwgz2T506FR988IHJYzZu3Ijjx4/jyJEjRm1paWlwc3ODn5+fwf6goCCkpaXJjM52mHBSmeCmzkadv79FYMYfJtuvBTXDxWodoHFxt3FkRETkLB7nHs7Lly9DpVLp97u7m/59c/nyZYwcORI7d+6Eh4eHmZE6HiacVOoF3j6NOpe+hVuh8cxhnqsKf4R3R3ZFXj4nIqLiPU4dTpVKZZBwFuXYsWO4ceMGnnzySf0+jUaDffv2YcmSJdixYwcKCgqQkZFhMMuZnp6O4OBgWbHZEhNOKrVcCu+i9qXvEXL7lMn26wFNkFTlBRS6eMIV1lt9h4iISgdbPKX+7LPP4vTp0wb7Bg0ahDp16mD8+PEICwuDq6srEhIS0KNHDwBAUlISUlNTERkZKTM622HCSaVSQEYS6v29Be5q4/s+C1y8cK5aN9wsX98OkRERERXNx8cHDRo0MNjn5eWFgIAA/f7BgwdjzJgx8Pf3h0qlwogRIxAZGemwT6gDTDiplHHV3kPTzM2odeVXk+03ytfHuapdoXb1tnFkRETk7Gxdh7MoCxYsgEKhQI8ePZCfn4+YmBgsW7bM8i9kQQ5VFmnfvn3o0qULQkNDIUkStm3b9shj9u7diyeffBLu7u6oWbMm4uLirB4nOaagvD/xQvp01Mo1TjbVSg+crv4yfq/xCpNNIiIyi/xVhiSLrKW+d+9eLFy4UP+1h4cHli5ditu3byM3Nxdbtmxx6Ps3AQdLOHNzc9GoUSMsXbq0RP1TUlLQuXNntG/fHidPnsSoUaPw+uuvY8eOHVaOlByJUpuPZhkb0eGf+fDW3DJq/8f3CSQ2GIX0gMaA9Pjf+EREVDYJMzdysEvqnTp1QqdOnUrcf8WKFQgPD8e8efMAAHXr1sWvv/6KBQsWICYmxlphOhRXIW9t73JC3j95FW3JZwPLCXl/
vzRwldffz8+4tmb5u3+j8ZUv4V1w06hNo3TFlbrP4FblCHhLGgB3ihz72NG6smLZc1veeT/tklHivskFI2WNLbcWqyORU1cTsG5NUCKiR3mcOpxlnUMlnHIlJiYaVeCPiYkxWOCeSieFthBP3IhHzX/2QDLx92O2fxguRXRCQTlfO0RHRESlkYAEIbMsktz+pZVTJ5xpaWkICgoy2BcUFISsrCzcu3cPnp7GK9nk5+cbLCeVlZVl9TjJslT3rqDJlQ1Q5RuvqKCRXHAu+AUUNKrJy+dEREQOwqkTTnPExsZi2rRp9g6DzCAJDWreTMATN36GwsRFjTueVXCi8ivIdQ9EJem6HSIkIqLSTJhxSd0aT6k7I4d6aEiu4OBgpKenG+xLT0+HSqUyObsJABMmTEBmZqZ+u3z5si1CpcdUQVxB64uLUOdGvFGyqZWUOBfUGQeqj0Cue6CdIiQiotKODw2Zz6lnOCMjI/Hjjz8a7Nu5c2exlfbd3d2LXL+UHI8ktGiu/QntNF/BRW28GlCmRyhOVH4F2R6hdoiOiIjKEj40ZD6HSjhzcnKQnJys/zolJQUnT56Ev78/qlSpggkTJuDq1atYt24dAODNN9/EkiVLMG7cOLz22mvYvXs3vvrqK/zwww/2egtkQX4iHS8Ufooq4k+jNi0USK74LP6q+ByEwqE+xkREVEqZM2PJfFPHoX5THz16FO3bt9d/PWbMGADAgAEDEBcXh+vXryM1NVXfHh4ejh9++AGjR4/GJ598gsqVK+Pzzz8vMyWRAMDNyndF5EqFJe4boPWQNXbjOtdMNwiBKrcTUef6driIAqPme14BSGnQGbm+oQjCbZNDXL0cIiuWfTflzXpfdDFeMrM4/0j3StzX0cocyYlHbtkiljkiImfCGU7zOVTC2a5dO4hi7q41tYpQu3btcOLECStGRbbkUZCBiCtfoWLOX0ZtAkB61ea4UvNpCKWr7YMjIiIiszhUwkllmBColHEM9a5ug6s2z6j5rps/LjXsiBz/KnYIjoiIiJfUHwcTTrI7N3U2Glz9GsFZZ022X/KPxJ8hL8DfX95lbCIiIkvSwoxL6tYIxAkx4SS7Cs44hQZXv4Gb5q5R2z1XX5yu/DL+8an97x4mnEREZD+c4TQfE06yCy/koHHqeoRmmL7/9kr5pvgjtBsKlabrqRIREdkaZzjNx4STbK4hTmAAPoNfRoZRW76LN85U+g/SfRvYPjAiIqJiCMhfOYgznDpMOJ2ci7BuWaSWolyJ+7ZunFpsu1KThzpXv0fY7SMm2wtqh+JuxyYIL5eFcBw0ar9w7IkSxxJ/Rl5ZpCOuN2X1/7NghKz+ld3nlbjvjbyxssaWW0aJpYiIiMjWmHCSTfhnJyMi9St4qjOM2rQerrjboTEK6lUGJMn2wREREZUAL6mbjwknWZVCW4Da135C1X8OmGwvqB6E3OefhPDhvZpEROTY+NCQ+ZhwktX45V5CROomeOX/Y9RWqHBDXscGKGhYlbOaRETkFATkz1gy4dRhwkkWJ2kLUSvtZ4Tf+AWSiW+1W97VcSbsZUQ0Ml5NiIiIyFHxkrr5mHCSRfncvYqGqZvgk5dm1KaRXPFXaCdcqhAFSNZ92ImIiMjSeEndfEw4ySIUohA10naiRloCFCb+nssoVwW/V+mFux4V7RAdERER2RMTTnpsFcUV9BDLUDntolGbVlLifHAH/B34NISktEN0RERElsFL6uZjwulg5NZUrCnKy+pfS+Mtq3+np/8oulFoEXrlMKqm7IYCGuPmMC9oXq+J6pUzUR3fG7X/9UMzWbGs/LlRifsedrkla+xCB/qRwLqaRESOSfz7P7nHEBNOMpPHvduo+ee38M26bNQmFIC2c2VoX6gMuPBeTSIiKh04w2k+JpwkjxAIvn4M1S7shFKrNm4O8YRmcE2IcB87BEdERGQ9fGjIfEw4qcTc8jJR86/vUf6O8b2aAoC2Qyi0L4UBbrxXk4iISh/OcJqPCSc9mhComP47qifHw0WTb9R8z6M8kmu/iDq9ztshOCIiInJ0TDipWK4FOajx13YE3DJdpP16aDP8XT0aWqUbACacRERUegkICEnmQ0OCF9UBJpxUjJbiMJoc+RyuhfeM2vLdVUh+ogsy/GvYITIiIiLb4yV18zHhdDCuQt79j4FaD1n9O1W6+8g+btpctLizAeH3jgCFxu3qpqEofLEOqnn+A+D/10m/eSJcVizLtkTK6r/T9XqJ+7pA3tPxeZKJN1oM2aWL8lm6iIjI2THhNB8TTjJQ6d7vaHVnHcpps4zahLcb8nvUh6Z+oB0iIyIisjf5dTj5nLoOE04CALhq76FZxleoefeAyfbCiCDkd68HeLnZODIiy9BoNFCrjUt5EZFjcHV1hVLp2FVOOMNpPiachOC8PxF5Jw7emttGbcLTBfnd6kHTOBiQJDtER/R4hBBIS0tDRkaGvUMhokfw8/NDcHAwpDL8+yY2NhZbtmzBn3/+CU9PT0RFRWHWrFmoXbu2vk9eXh7effddbNy4Efn5+YiJicGyZcsQFBRkx8iLx4SzDHPR5qNJ5hbUyd1jsv2KRwP4jwmA8JV3nyiRI7mfbAYGBqJcuXJl+hcZkaMSQuDu3bu4ceMGACAkJMTOEZlmi6Utf/nlFwwbNgzNmzdHYWEhJk6ciA4dOuCPP/6Al5cXAGD06NH44YcfsHnzZvj6+mL48OHo3r07DhwwfZXSETDhLKMq5l9A1J01UBXeMGorkDxwzO9lJJd7Ct19t9ghOiLL0Gg0+mQzICDA3uEQUTE8PT0BADdu3EBgYKBDXl63xSX1+Ph4g6/j4uIQGBiIY8eO4emnn0ZmZiZWrVqFDRs24JlnngEArFmzBnXr1sWhQ4fQqlUrma9oG0w4yxiFUKNR1veol70DChN/daW518bB8gOQ61LBDtERWdb9ezbLlStn50iIqCTuf6+q1WqHTDiFpNtkHfPv/8nKMnwY193dHe7u7o88PjMzEwDg7+8PADh27BjUajWio6P1ferUqYMqVaogMTGRCWdZJqeETn2NvFmYHoGaEvcNUF/Ciy6zIGUbrxYk3CWIQcEIfMEV3RRf6vefXflsicdftK1lifsCwAHXfx7d6QF3ZZQuupE/VtbYsssc5bHMkTPhZXQi5+Do36u6GU55l8jvz3CGhYUZ7J86dSo++OCD4o/VajFq1Cg89dRTaNCgAQDdbUJubm7w8/Mz6BsUFIS0tDRZsdkSE84yQBKFaJT7IxrnbId0yzhBFXU8IcaEAZUf/ZcWERFRWfU4l9QvX74MlUql31+S2c1hw4bhzJkz+PXXX2W+quNhwlnK+RVew9MZn6NC4SWjNuEiQbwaBHSvACgd+69KIiIiZ6ZSqQwSzkcZPnw4tm/fjn379qFy5cr6/cHBwSgoKEBGRobBLGd6ejqCg4MtGbJFyVuOxUaWLl2KatWqwcPDAy1btsRvv/1WZN+4uDhIkmSweXjwqWpJaNEgNx4v/jPNdLJZ3QPik5pAz4pMNokciBAC0dHRiImJMWpbtmwZ/Pz8cOXKFYu
+5u3btzFixAjUrl0bnp6eqFKlCt555x39vWOmqNVqjB8/HhEREfDy8kJoaCj69++Pa9euGfR7+Ofz/W3jxo1mx/uo3xF5eXkYNmwYAgIC4O3tjR49eiA9Pb3I8XJzc1GjRg2MGTPGYP/ff/8NlUqFzz77zOxYH6Vdu3ZG5+bNN9985HHnzp3Diy++CF9fX3h5eaF58+ZITU3Vt1erVs3keZ85U97tQw/avHkz6tSpAw8PD0RERODHH380aN+yZQs6dOiAgIAASJKEkydPmv1ajkqY+T9ZryEEhg8fjq1bt2L37t0IDzdcxa9p06ZwdXVFQkKCfl9SUhJSU1MRGSlvBT9bcriEc9OmTRgzZgymTp2K48ePo1GjRoiJidGXSjBFpVLh+vXr+u3SJeMEqyzxKUzH87dnoUX2Zrg8tDalUADilUCIBTWAcCbmRI5GkiSsWbMGhw8fxqeffqrfn5KSgnHjxmHx4sUGsx2WcO3aNVy7dg1z587FmTNnEBcXh/j4eAwePLjIY+7evYvjx49j8uTJOH78OLZs2YKkpCS8+OKLRn3XrFlj8DP6+vXr6Natm8lx4+Li0K5duyJftyS/I0aPHo3vv/8emzdvxi+//IJr166he/fuRY7p5eWFNWvWYPHixdi/fz8A3S/9QYMG4amnnsKQIUOKPPZhkiTh77//LnF/ABgyZIjBuZk9e3ax/S9cuIDWrVujTp062Lt3L37//XdMnjzZaLJl+vTpRud9xIgRJsfcu3cvqlWrVuRrHjx4EH369MHgwYNx4sQJdOvWDd26dcOZM2f0fXJzc9G6dWvMmjWr5G/eyWjN3OQYNmwYvvjiC2zYsAE+Pj5IS0tDWloa7t27BwDw9fXF4MGDMWbMGOzZswfHjh3DoEGDEBkZ6bAPDAEOeEl9/vz5GDJkCAYNGgQAWLFiBX744QesXr0a771n+kENSZIcehrZZoQWde7tRfPszXAVBUbNd5Qh8J1XDniCT+wSObKwsDB88sknGD58ODp06IBq1aph8ODB6NChA1599VWLv16DBg3wzTff6L+uUaMGPvroI/Tr1w+FhYVwcTH+VeHr64udO3ca7FuyZAlatGiB1NRUVKlSRb//fjFvS3jU7whzS8Y8/fTTGDFiBAYNGoRTp07hs88+w8mTJw0SKmspV66crPPz/vvv4/nnnzdITGvUqGHUz8fHx2Ln/ZNPPkHHjh0xdqzugcwPP/wQO3fuxJIlS7BixQoA0H825SbczkQLYcZDQ/L6L1++HACM/vBas2YNBg4cCABYsGABFAoFevToYVD43ZE51AxnQUEBjh07ZvCov0KhQHR0NBITE4s8LicnB1WrVkVYWBi6du2Ks2fPFtk3Pz8fWVlZBltp4KW5jZg7CxCVtd4o2RSQcLpcDL6rMJXJJpGTGDBgAJ599lm89tprWLJkCc6cOWMw4/mwTp06wdvbu8itfv36sl4/MzMTKpXKZLJZ3DGSJBk9PWspJfkd8aiSMcX56KOP4OLign79+mHixIlYvHgxKlWqZJX38qD169ejQoUKaNCgASZMmIC7d+8W2Ver1eKHH37AE088gZiYGAQGBqJly5bYtm2bVWNMTEw0OKcAEBMT88hzWtrcL4skd5P1GkKY3O4nmwDg4eGBpUuX4vbt28jNzcWWLVscfuLNoWY4//nnH2g0GqOlmYKCgvDnn3+aPKZ27dpYvXo1GjZsiMzMTMydOxdRUVE4e/asyctOsbGxmDZtmlXitwshUPPeAbTK3gg3cc+oOUtZEft9ByPdrZYdgiOix7Fy5UrUr18f+/btwzfffIOKFSsW2ffzzz/XX3IzxdXVtcSv+88//+DDDz/E0KFDS3xMXl4exo8fjz59+hg9GNGnTx+jmop//PGHwSxoSeN61O+IxykZ4+npqZ/J69SpE/r16ycrPnO88sorqFq1KkJDQ/H7779j/PjxSEpKwpYtphfduHHjBnJycjBz5kzMmDEDs2bNQnx8PLp37449e/agbdu2+r7jx4/HpEmTDI7/6aef0KZNG9lxpqWlmTzvjlyGxxpsMcNZWjlUwmmOyMhIg5tko6KiULduXXz66af48MMPjfpPmDDB4MbwrKwso9pYjyK3ZmOA1rPEfduJkj/B5i0yMKDRaCh/zjXZXjjAF67vq/CM1/9fKkudLO8HzaTvmpS4b6LbZVljq6WS1xAFrFv7knU1yREFBgbijTfewLZt24q85/E+S83EZWVloXPnzqhXr94jawTep1ar8fLLL0MIob8c+KAFCxYYzY6FhoYCAFJTU1GvXj39/sLCQqjVanh7e+v3TZw4ERMnTjTj3ci3atUqlCtXDqdPn0ZmZiZ8fX2L7d+pUyf9fZ/31a9fX19PsmrVqsVedXswqY+IiEBISAieffZZXLhwweRlcq1Wd0dg165dMXr0aABA48aNcfDgQaxYscIg4Rw7dqzBrBhg+Dl58BxrNBrk5+cb7OvXr5/+cjnR43KohLNChQpQKpVGTxPKedTf1dUVTZo0QXJyssn2klb2d3T1xSG8KFaZTDa1IS5Qzw+E9mkvO0RGRJbk4uJSosvaphKfBz0q8QGA7OxsdOzYET4+Pti6dWuJZkXvJ5uXLl3C7t27TZZ9CQ4ORs2aNU0eHxoaavA085YtW/DNN99g/fr1+n33V1gpye+IxykZs2nTJmzfvh2JiYno06cPRo8ejdWrVxd7zMMzy7Vq1cKPP/6oT+zkzCwDQMuWugU0kpOTTSacFSpUgIuLi0GSDgB169Y1qtVYoUKFIs87AIPzfvjwYYwfPx579+7V73vw3zI4OPixfjeXHvKfOgdnOAE4WMLp5uaGpk2bIiEhQf/XvFarRUJCAoYPH16iMTQaDU6fPo3nn3/eipHaj6fIRhexBg1x0GR7YU8fqKdVBHwdb0kwIrKex72knpWVhZiYGLi7u+O7774rUXm5+8nm+fPnsWfPHrPWq3dxcTFIigIDA+Hp6WkyUSrJ74gHS8b06NEDQMlKxqSnp2PYsGGYMWMGGjVqhLi4OERFRaFnz57o1KlTkceZmlmuWrVqsU98F+d+EhgSEmKy3c3NDc2bN0dSUpLB/r/++gtVq1aV9VoPnuMrV64Y/Vs8KDIyEgkJCRg1apR+386dOx26DI812GIt9dLKoRJOABgzZgwGDBiAZs2aoUWLFli4cCFyc3P1TyT2798flSpVQmxsLABd2YdWrVqhZs2ayMjIwJw5c3Dp0iW8/vrr9nwbVlFbHEM38Rl8kGHUJiooUTA7ENoYb+MDiajUe5xL6llZWejQoQPu3r2LL774wuCByooVK+rvv6xTpw5iY2Px0ksvQa1W4z//+Q+OHz+O7du3Q6PR6O/n8/f3h5ubm378jIwMo3v9fHx84OUl/yrMo35HPFgyxt/fHyqVCiNGjHhkyZihQ4eibt26+oSqRYsWGDt2LIYOHYozZ8488tK6OS5cuIANGzbg+eefR0BAAH7//XeMHj0aTz/9NBo2bKjv9+B5B3SXynv16oWnn34a7du3R3x8PL7//nuD2UlAN2P98HkvV6
6crOLj940cORJt27bFvHnz0LlzZ2zcuBFHjx7FypUr9X1u376N1NRUfS3W+0lxcHBwqZkJ5T2c5nO4hLNXr164efMmpkyZgrS0NDRu3Bjx8fH6m5VTU1OhUPz/w/V37tzBkCFDkJaWhvLly6Np06Y4ePCg0eUGZ+Yu7uJ5sQ5Nsddku6azNwpmBgL+nNUkIvmOHz+Ow4cPA4DRDFdKSop+ti4pKUlfDP7q1av47rvvAOjuIXzQnj17DEq63E8GHxQbG1tkqbviPOp3BCC/ZMy6deuwa9cunDp1yuD3y7Rp07B9+/YSXVo3h5ubG3bt2qVPmsPCwtCjRw+jB30ePO8A8NJLL2HFihWIjY3FO++8g9q1a+Obb75B69atDY6bMmUKpkyZYrDvjTfeMOu+zKioKGzYsAGTJk3CxIkTUatWLWzbtk2/vjcAfPfddwb/1r179wZguGb4wIED8ffffxslx85CQP4FcqabOpIQokyfi6ysLPj6+upLgJSENR8a6q0x/CuwujiN7uJT+OEfo7534QWXZT7QvOgNSCWru5Au86GhkXFtH93pX4kuRa/kYYojPTREpVNeXh5SUlIQHh7OFciIHEDbtm3Rvn37Ih9IK+571pzf15Zy/7XbKJfARSr573QAKBT3sF8z3C5xOxKHm+EkHVeRhw7iS0Rih8n2P9EE26ShGNl1vo0jIyIiki8zMxMXLlzADz/8YO9QyA6YcJrBVci7dP28WsYKElcnQ3HkHlxHpUPxt9qoXXgroJ5WAVV7ZWOkNB9Zc5rJimVY3NOy+h+RMWvpJnMdgRt5Y2X1J3psWi1w65b9Xj8gAFA41HobRDbj6+uLK1eu2DuMx8J7OM3HhNOBuKAALh/9A5cVdyCZeKxNE+UJ9YIgiMryymwQ0b9u3QICA+33+jduAMUUbycix8Z7OM3HhNNBVEYK+mEpXJfdMWoTHhLUkypAM8AXUMhcI4uIiIgsgjOc5mPCaWcKFKIDtiEGW6GE8UM0mqYeUC8MgqjuZuJoIiIishUmnObjzUR2FIzLeBeT8Ty+Nko2hZsE9fsBKNhamckmURk0cOBASJIESZLg6uqK8PBwjBs3Dnl5eQB0JXs6dOiABg0aoE+fPsjPzzfrdfLy8jBs2DAEBATA29sbPXr0MFpR5mH343p4mzNnDgBg7969RfY5cuSIWXESOQKtmRtxhtMuJGjRHj/gBWyCCwqN2rUN3FHwSRBEHedfgpPIoQQE6O6jtOfry9CxY0esWbMGarUax44dw4ABAyBJEmbNmoUJEyboi6vXqlULFy9eRN26dWWHNHr0aPzwww/YvHkzfH19MXz4cHTv3h0HDhwo8pjr168bfP3TTz9h8ODB+pV9oqKijPpMnjwZCQkJaNZM3oOORFQ6MOG0sQpIQ18sRw0kGbVpoIB2tB8K3/EH3HivJpHFKRRO9dCOu7u7foWWsLAwREdHY+fOnZg1a5Y+2ZwyZQq6d+9uVrKZmZmJVatWYcOGDXjmmWcAAGvWrEHdunVx6NChIlfmeXjVmG+//Rbt27dH9erVAegKmj/YR61W49tvv8WIESMglbBmMJEjEmaspS5/7fXSiQmnGboUVJbVf3reBEAr4LYyG54Tb0O6a/zh09R1xd3PK+LW552A6SUfu/v/msqK5YqLvNmdG/ksXUTkCM6cOYODBw/q18vOysrCm2++icjISIwYMULfb/369XjjjTeKHeunn35CmzZtcOzYMajVakRHR+vb6tSpgypVqiAxMbHYpSDvS09Pxw8//IC1a9cW2ee7777DrVu3TK44RORMhBn3cDLh1GHCaQNSaiHKvXkTrrvzjNqEBOSP8kXeVD/AQwF8bvv4iMgxbd++Hd7e3igsLER+fj4UCgWWLFkCAHj11Vdx6NAhXLx4EevXr8e8efPw1FNP4cUXX0TLli2LHff+uutpaWlwc3ODn5+fQXtQUJDRGtxFWbt2LXx8fNC9e/ci+6xatQoxMTGoXFneH+tEjkYrCUgSHxoyBxNOqxJ4CvuganYFUpaJWc3qLrj7WUVonuKSe0RkrH379li+fDlyc3OxYMECuLi46O+T/Pbbb00e4+PjAx8fH5vFuHr1avTt27fIpUOvXLmCHTt24KuvvrJZTETWogUg96YQPjSkw6fUrUSFDIzAPLyGT00mm/lv+CD7SCUmm0RUJC8vL9SsWRONGjXC6tWrcfjwYaxatarYY9avXw9vb+9it/379wPQ3YtZUFCAjIwMgzHS09ON7tM0Zf/+/UhKSsLrr79eZJ81a9YgICAAL7744qPfMBGVWpzhtIJmOIRXsRreyDFq01ZS4u6nFVEY7WmHyIjIWSkUCkycOBFjxozBK6+8Ak9P0z9D5FxSb9q0KVxdXZGQkKCfOU1KSkJqaioiIyMfGdOqVavQtGlTNGrUyGS7EAJr1qxB//794erKFdLI+WkhILEOp1mYcFqQF7LRD2vQAodMtue/6o17cwMAX04sE5F8PXv2xNixY7F06VL897//NdlHziV1X19fDB48GGPGjIG/vz9UKhVGjBiByMhIgweG6tSpg9jYWLz00kv6fVlZWdi8eTPmzZtX5Pi7d+9GSkpKsTOgRM6ET6mbjwmnhTTCMfTHKvghw6hNG6TE3aUBKHzBy/aBEVGp4eLiguHDh2P27Nl466234OX1+D9TFixYAIVCgR49eiA/Px8xMTFYtmyZQZ+kpCRkZmYa7Nu4cSOEEOjTp0+RY69atQpRUVGoU6fOY8dJ5Ag4w2k+SQhRps9EVlYWfH19kZmZCZVKVaJjBktf6v/bE3fRG/9Da/xisu8RtMQTV65DVFCWaOyMt54pUb/7ZJdFUhhf5i8OyyKRM8vLy0NKSgrCw8OLfKiFiBxHcd+z5vy+tpT7r13bZTaUkrxb4jTiHpIKx9klbkfCGU4zfPZjrO4/TuZAWnAF0k21UR/ho4R4OxRN295FXLOpJR7741vZsmK5pcx8dKcH3Ml7T1Z/IiIi0uEMp/mYcJojTwtp9XVI22+bbBbNfSBGVgL8eZM8ERERERNOuQ4cgDTsPKTrBUZNwlMB8UYI8Fx5gMu3ERERlSq6OpxyZzgJYMJZcnl5wJQpwNy5kEzc9ioaeUGMqgwEudkhOCIiIrI2IQFamfNJvKCuw4SzpPr3BzZvNtot3CWI14KBzgGAgrOaREREpZXufkzew2kOFoQsqfHjARfD/FzULQexpBbQpQKTTSIHVsaLcRA5DUf/XtVCmLURE86Sa9oUmDQJACBcJGgHBUPMrg5UcrdzYERUlPur29y9e9fOkRBRSdz/XnXUlak0EGZtxEvq8kycCPz9N6R334XUoEGJD3vteslf4jUzwiIi05RKJfz8/HDjxg0AQLly5SDxgT4ihyOEwN27d3Hjxg34+flBqSxZ7WpyHkw45XB1BdassXcURCRDcHAwAOiTTiJyXH5+fvrvWUfEezjNx4STiEo1SZIQEhKCwMBAqNXGizQQkWNwdXV1+JlNJpzmY8JJRGWCUql0+F9mROTYNJIWQpJXWVNrZiXOpUuXYs6cOUhLS
0OjRo2wePFitGjRwqyxHAEfGiIiIiIqAVs9NLRp0yaMGTMGU6dOxfHjx9GoUSPExMQ49a1BTDiJiIiISkBrRrJpziX1+fPnY8iQIRg0aBDq1auHFStWoFy5cli9erUV3pVtMOEkIiIichAFBQU4duwYoqOj9fsUCgWio6ORmJhox8geT5m/h/N+kdmsrCw7R0JERERFuf972p7F4c9cGAyVSiXrmKysLISFfWCUZ7i7u8Pd3biW9z///AONRoOgoCCD/UFBQfjzzz/lB+0gynzCmZ2dDQAICwuzcyRERET0KNnZ2fD19bXpa7q5uSE4ONjsXMHb29vo2KlTp+KDDz6wQHTOocwnnKGhobh8+TJ8fHzKbEFo3V9fYbh8+bLsv9xKC54DHZ4HngOA5+A+ngfHOgdCCGRnZyM0NNTmr+3h4YGUlBQUFBSYdbwQwijHMDW7CQAVKlSAUqlEenq6wf709HSHrlH6KGU+4VQoFKhcubK9w3AIKpXK7j9Q7I3nQIfngecA4Dm4j+fBcc6BrWc2H+Th4QEPDw+rv46bmxuaNm2KhIQEdOvWDQCg1WqRkJCA4cOHW/31raXMJ5xEREREjmTMmDEYMGAAmjVrhhYtWmDhwoXIzc3FoEGD7B2a2ZhwEhERETmQXr164ebNm5gyZQrS0tLQuHFjxMfHGz1I5EyYcBLc3d0xderUIu8nKQt4DnR4HngOAJ6D+3geeA7safjw4U59Cf1hkrBnfQEiIiIiKvVY+J2IiIiIrIoJJxERERFZFRNOIiIiIrIqJpyl1NKlS1GtWjV4eHigZcuW+O2334rs+9lnn6FNmzYoX748ypcvj+joaKP+AwcOhCRJBlvHjh2t/TYei5xzEBcXZ/T+Hq63JoTAlClTEBISAk9PT0RHR+P8+fPWfhuPRc45aNeundE5kCQJnTt31vdxts/Bvn370KVLF4SGhkKSJGzbtu2Rx+zduxdPPvkk3N3dUbNmTcTFxRn1kXNe7U3uOdiyZQuee+45VKxYESqVCpGRkdixY4dBnw8++MDoc1CnTh0rvovHJ/c87N271+T3Q1pamkG/0vxZMPX9LkkS6tevr+/jjJ8Fsg8mnKXQpk2bMGbMGEydOhXHjx9Ho0aNEBMTgxs3bpjsv3fvXvTp0wd79uxBYmIiwsLC0KFDB1y9etWgX8eOHXH9+nX99uWXX9ri7ZhF7jkAdIWNH3x/ly5dMmifPXs2Fi1ahBUrVuDw4cPw8vJCTEwM8vLyrP12zCL3HGzZssXg/Z85cwZKpRI9e/Y06OdMn4Pc3Fw0atQIS5cuLVH/lJQUdO7cGe3bt8fJkycxatQovP766wYJlzmfLXuSew727duH5557Dj/++COOHTuG9u3bo0uXLjhx4oRBv/r16xt8Dn799VdrhG8xcs/DfUlJSQbvMzAwUN9W2j8Ln3zyicF7v3z5Mvz9/Y1+JjjbZ4HsRFCp06JFCzFs2DD91xqNRoSGhorY2NgSHV9YWCh8fHzE2rVr9fsGDBggunbtaulQrUbuOVizZo3w9fUtcjytViuCg4PFnDlz9PsyMjKEu7u7+PLLLy0WtyU97udgwYIFwsfHR+Tk5Oj3Odvn4EEAxNatW4vtM27cOFG/fn2Dfb169RIxMTH6rx/3vNpTSc6BKfXq1RPTpk3Tfz116lTRqFEjywVmYyU5D3v27BEAxJ07d4rsU9Y+C1u3bhWSJIm///5bv8/ZPwtkO5zhLGUKCgpw7NgxREdH6/cpFApER0cjMTGxRGPcvXsXarUa/v7+Bvv37t2LwMBA1K5dG2+99RZu3bpl0dgtxdxzkJOTg6pVqyIsLAxdu3bF2bNn9W0pKSlIS0szGNPX1xctW7Ys8Xm1JUt8DlatWoXevXvDy8vLYL+zfA7MkZiYaHDOACAmJkZ/zixxXp2NVqtFdna20c+D8+fPIzQ0FNWrV0ffvn2Rmppqpwitq3HjxggJCcFzzz2HAwcO6PeXxc/CqlWrEB0djapVqxrsLyufBXo8TDhLmX/++QcajcZoNYKgoCCje4+KMn78eISGhhr8IO3YsSPWrVuHhIQEzJo1C7/88gs6deoEjUZj0fgtwZxzULt2baxevRrffvstvvjiC2i1WkRFReHKlSsAoD/ucc6rLT3u5+C3337DmTNn8Prrrxvsd6bPgTnS0tJMnrOsrCzcu3fPIt9fzmbu3LnIycnByy+/rN/XsmVLxMXFIT4+HsuXL0dKSgratGmD7OxsO0ZqWSEhIVixYgW++eYbfPPNNwgLC0O7du1w/PhxAJb5WetMrl27hp9++snoZ0JZ+CyQZXClITIwc+ZMbNy4EXv37jV4aKZ37976/46IiEDDhg1Ro0YN7N27F88++6w9QrWoyMhIREZG6r+OiopC3bp18emnn+LDDz+0Y2T2sWrVKkRERKBFixYG+0v754AMbdiwAdOmTcO3335rcO9ip06d9P/dsGFDtGzZElWrVsVXX32FwYMH2yNUi6tduzZq166t/zoqKgoXLlzAggUL8L///c+OkdnH2rVr4efnh27duhnsLwufBbIMznCWMhUqVIBSqUR6errB/vT0dAQHBxd77Ny5czFz5kz8/PPPaNiwYbF9q1evjgoVKiA5OfmxY7a0xzkH97m6uqJJkyb693f/uMcZ05Ye5xzk5uZi48aNJfpl4cifA3MEBwebPGcqlQqenp4W+Ww5i40bN+L111/HV199ZXSbwcP8/PzwxBNPlJrPQVFatGihf49l6bMghMDq1avx6quvws3Nrdi+ZeWzQPIx4Sxl3Nzc0LRpUyQkJOj3abVaJCQkGMzgPWz27Nn48MMPER8fj2bNmj3yda5cuYJbt24hJCTEInFbkrnn4EEajQanT5/Wv7/w8HAEBwcbjJmVlYXDhw+XeExbepxzsHnzZuTn56Nfv36PfB1H/hyYIzIy0uCcAcDOnTv158wSny1n8OWXX2LQoEH48ssvDcpiFSUnJwcXLlwoNZ+Dopw8eVL/HsvKZwEAfvnlFyQnJ5foj9Cy8lkgM9j7qSWyvI0bNwp3d3cRFxcn/vjjDzF06FDh5+cn0tLShBBCvPrqq+K9997T9585c6Zwc3MTX3/9tbh+/bp+y87OFkIIkZ2dLf773/+KxMREkZKSInbt2iWefPJJUatWLZGXl2eX9/gocs/BtGnTxI4dO8SFCxfEsWPHRO/evYWHh4c4e/asvs/MmTOFn5+f+Pbbb8Xvv/8uunbtKsLDw8W9e/ds/v5KQu45uK9169aiV69eRvud8XOQnZ0tTpw4IU6cOCEAiPnz54sTJ06IS5cuCSGEeO+998Srr76q73/x4kVRrlw5MXbsWHHu3DmxdOlSoVQqRXx8vL7Po86ro5F7DtavXy9cXFzE0qVLDX4eZGRk6Pu8++67Yu/evSIlJUUcOHBAREdHiwoVKogbN27Y/P2VlNzzsGDBArFt2zZx/vx5cfr0aTFy5EihUCjErl279H1K+2fhvn79+omWLVuaHNMZPwtkH0w4S6nFixeLKlWqCDc3N9GiRQtx6NAhfVvbtm3FgAED
9F9XrVpVADDapk6dKoQQ4u7du6JDhw6iYsWKwtXVVVStWlUMGTLEYX+o3ifnHIwaNUrfNygoSDz//PPi+PHjBuNptVoxefJkERQUJNzd3cWzzz4rkpKSbPV2zCLnHAghxJ9//ikAiJ9//tloLGf8HNwvbfPwdv99DxgwQLRt29bomMaNGws3NzdRvXp1sWbNGqNxizuvjkbuOWjbtm2x/YXQlYoKCQkRbm5uolKlSqJXr14iOTnZtm9MJrnnYdasWaJGjRrCw8ND+Pv7i3bt2ondu3cbjVuaPwtC6Mq/eXp6ipUrV5oc0xk/C2QfkhBCWHkSlYiIiIjKMN7DSURERERWxYSTiIiIiKyKCScRERERWRUTTiIiIiKyKiacRERERGRVTDiJiIiIyKqYcBIRERGRVTHhJCIiIiKrYsJJREREj23fvn3o0qULQkNDIUkStm3bJuv4pKQktG/fHkFBQfDw8ED16tUxadIkqNVq6wRMNsWEk4hKhX79+sHDwwN//fWXUdvMmTMhSRK2b99uh8iIyobc3Fw0atQIS5cuNet4V1dX9O/fHz///DOSkpKwcOFCfPbZZ5g6daqFIyV74NKWRFQq3LhxA3Xq1EHjxo2xe/du/f6UlBTUr18fzz//PL7++ms7RkhUdkiShK1bt6Jbt276ffn5+Xj//ffx5ZdfIiMjAw0aNMCsWbPQrl27IscZM2YMjhw5gv3791s/aLIqznASUakQGBiIWbNmYc+ePVi7dq1+/9tvvw1XV1d88skndoyOiIYPH47ExERs3LgRv//+O3r27ImOHTvi/PnzJvsnJycjPj4ebdu2tXGkZA2c4SSiUkMIgTZt2iApKQl//vkndu7ciT59+mDRokUYMWKEvcMjKjMenuFMTU1F9erVkZqaitDQUH2/6OhotGjRAh9//LF+X1RUFI4fP478/HwMHToUy5cvh0LB+TFn52LvAIiILEWSJHz66ado0qQJ3nrrLezfvx/NmjXDsGHD7B0aUZl2+vRpaDQaPPHEEwb78/PzERAQYLBv06ZNyM7OxqlTpzB27FjMnTsX48aNs2W4ZAVMOImoVKlfvz7++9//IjY2FkqlEj/88ANnR4jsLCcnB0qlEseOHYNSqTRo8/b2Nvg6LCwMAFCvXj1oNBoMHToU7777rtFx5FyYcBJRqVOhQgUAQGhoKBo0aGDnaIioSZMm0Gg0uHHjBtq0aVPi47RaLdRqNbRaLRNOJ8eEk4hKlcuXL2Pq1Klo0KABzpw5g9mzZ2PSpEn2Douo1MvJyUFycrL+65SUFJw8eRL+/v544okn0LdvX/Tv3x/z5s1DkyZNcPPmTSQkJKBhw4bo3Lkz1q9fD1dXV0RERMDd3R1Hjx7FhAkT0KtXL7i6utrxnZEl8KEhIipVunbtit27d+PcuXMYM2YMvv/+e5w9exbVq1e3d2hEpdrevXvRvn17o/0DBgxAXFwc1Go1ZsyYgXXr1uHq1auoUKECWrVqhWnTpiEiIgKbNm3C7Nmz8ddff0EIgapVq6Jfv34YPXo0PDw87PCOyJKYcBJRqbF161Z0794dCxYswKhRo3Dt2jXUrVsXUVFR+Omnn+wdHhFRmcWEk4hKhezsbNSrVw8VK1bEkSNH9Pd7LVq0CCNHjsRXX32Fnj172jlKIqKyiQknEZUKI0eOxJIlS3Do0CE0b95cv1+j0aBFixZIS0vDn3/+CR8fHztGSURUNrFWCBE5vWPHjmHp0qV4++23DZJNAFAqlVixYgXS0tL48BARkZ1whpOIiIiIrIoznERERERkVUw4iYiIiMiqmHASERERkVUx4SQiIiIiq2LCSURERERWxYSTiIiIiKyKCScRERERWRUTTiIiIiKyKiacRERERGRVTDiJiIiIyKqYcBIRERGRVTHhJCIiIiKrYsJJRERERFb1f7lMCKTzAVraAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig, ax = spatial_correlation_plotter.get_heatmap_figure()\n", + "fig.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The function `get_heatmap_figure` returns a figure and axis object that can be used to further customize the plot. The figure can be saved to a file using the `savefig` method of the figure object.\n", + "\n", + "The function has many parameters that can be used to customize the plot:" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Help on function get_heatmap_figure in module tapenade.analysis.spatial_correlation._spatial_correlation_plotter:\n", + "\n", + "get_heatmap_figure(self, bins: tuple = (40, 40), show_individual_cells: bool = False, show_linear_fit: bool = True, normalize_quantities: bool = False, extent_X: tuple = None, extent_Y: tuple = None, percentiles_X: tuple = (0, 100), percentiles_Y: tuple = (0, 100), log_scale_X: bool = False, log_scale_Y: bool = False, figsize: tuple = (7, 4), label_X: str = 'X', label_Y: str = 'Y', colormap: str = 'plasma', sample_fraction: float = 0.005, display_quadrants: bool = False, fig_ax_tuple: tuple = None)\n", + " Create a heatmap of the spatial correlation between two quantities X and Y.\n", + " \n", + " Parameters:\n", + " - bins: list of two integers, number of bins in each dimension\n", + " - show_individual_cells: bool, if True and if labels was specified in __init__,\n", + " individual cells will be displayed as a scatter plot on top of the heatmap.\n", + " - show_linear_fit: bool, if True, a linear fit will be displayed on the heatmap.\n", + " - normalize_quantities: bool, if True, the quantities will be normalized to have\n", + " zero mean and unit standard deviation.\n", + " - extent_X: tuple of two floats, the extent of the X axis\n", + " - extent_Y: tuple of two floats, the extent of the Y axis\n", + " - percentiles_X: tuple of two floats, only the values of X between these percentiles\n", + " will be considered\n", + " - percentiles_Y: tuple of two floats, the percentiles of Y between thse percentiles\n", + " will be considered\n", + " - log_scale_X: bool, if True, the X axis will be displayed in log scale\n", + " - log_scale_Y: bool, if True, the Y axis will be displayed in log scale\n", + " - figsize: tuple of two floats, the size of the figure\n", + " - label_X: str, the label of the X axis\n", + " - label_Y: str, the label of the Y axis\n", + " - colormap: str, the name of the colormap\n", + " - sample_fraction: float, if labels has not been specified in __init__, sample_fraction\n", + " represents the fraction of the total number of values of X and Y that will be\n", + " randomly selected to be used for the computation of the statistics.\n", + " - display_quadrants: bool, if True, quadrants will be displayed as vertical and horizontal\n", + " dashed lines at the Otsu thresholds of X and Y.\n", + " - fig_ax_tuple: tuple of matplotlib figure and axis, if not None, the heatmap will be\n", + " displayed on the provided figure and axis.\n", + " \n", + " Returns:\n", + " - fig: matplotlib figure\n", + " - ax: matplotlib axis\n", + "\n" + ] + } + ], + "source": [ + "help(SpatialCorrelationPlotter.get_heatmap_figure)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "napari-mine", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { 
+ "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/tapenade/notebooks/spectral_filtering_notebook.ipynb b/src/tapenade/notebooks/spectral_filtering_notebook.ipynb new file mode 100644 index 0000000..a95cdcd --- /dev/null +++ b/src/tapenade/notebooks/spectral_filtering_notebook.ipynb @@ -0,0 +1,152 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
πŸ—οΈ\n", + "This notebook is still under construction!\n", + "⚠️
\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### After clicking on a code cell, press \"Shift+Enter\" to run the code, or click on the \"Run\" button in the toolbar above.
\n", + "\n", + "### Replace the \"...\" placeholders with the appropriate path to your data.\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import tifffile\n", + "from pathlib import Path\n", + "from tapenade import get_path_to_demo_folder" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Loading spectral patterns" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "path_to_data = get_path_to_demo_folder()\n", + "\n", + "channels=4 #fill up here the number of channels\n", + "species = np.array([1,2,3,4])\n", + "#species = np.array([1,2]) #for GFP and Alexa488\n", + "spectralpatterns=np.zeros((221,channels,species.shape[0]))\n", + "\n", + "for i in species:\n", + " filepath = Path(path_to_data) / f'species{i}_medfilt.npy'\n", + " with open(filepath, 'rb') as f2:\n", + " avgspectrum_i=np.load(f2)\n", + " spectralpatterns[:,:,i-1] = avgspectrum_i[:,:]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Filtering" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "img = tifffile.imread(path_to_data / '03_Hoechst_Ecad_Bra_Sox2.tif')\n", + "\n", + "# Initialize filtered image array with same shape as input, int16 for intermediate calculations\n", + "image_filtered = np.zeros_like(img).astype(np.int16)\n", + "\n", + "for z in range(img.shape[0]):\n", + " # Compute mean intensity for each channel at z-slice\n", + " Iavg_channels = np.mean(img[z,:,:,:], axis=(1,2))\n", + " # Create diagonal normalization matrix\n", + " D = np.diag(1 / Iavg_channels)\n", + " # Prepare spectral patterns for this z-slice\n", + " specpatterns_z = np.zeros((species.shape[0], img.shape[1]))\n", + " for i in species:\n", + " specpatterns_z[i-1, :] = spectralpatterns[z, :, i-1]\n", + " # Compute unmixing weights\n", + " w = np.linalg.inv(specpatterns_z @ D @ np.transpose(specpatterns_z)) @ specpatterns_z @ D\n", + " # Apply unmixing to each species\n", + " for i in species:\n", + " image_filtered[z, i-1, :, :] = sum([w[i-1, j] * img[z, j, :, :] for j in range(0, channels)])\n", + "\n", + "# Clip negative values and convert to uint16 for saving\n", + "image_filtered[image_filtered < 0] = 0\n", + "image_filtered = image_filtered.astype(np.uint16)\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Plots results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "z_to_plot = 50 # index of the z-slice to plot\n", + "\n", + "fout, axarrout = plt.subplots(1,channels, figsize=(20, 5)) \n", + "\n", + "for i in range(channels):\n", + " axarrout[i].imshow(img[z_to_plot,i,:,:])\n", + "\n", + "fout2, axarrout2 = plt.subplots(1,channels, figsize=(20, 5))\n", + "\n", + "for i in range(channels):\n", + " axarrout2[i].imshow(image_filtered[z_to_plot,i,:,:])\n", + "\n", + "fout.tight_layout()\n", + "fout2.tight_layout()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "napari-mine", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/tapenade/preprocessing/_labels_masking.py 
b/src/tapenade/preprocessing/_labels_masking.py index 38691ca..b56729b 100644 --- a/src/tapenade/preprocessing/_labels_masking.py +++ b/src/tapenade/preprocessing/_labels_masking.py @@ -31,5 +31,6 @@ def _remove_labels_outside_of_mask( if volume_inside < prop.area: labels_roi = labels[prop.slice] labels_roi[labels_roi == prop.index] = 0 + labels[prop.slice] = labels_roi return labels diff --git a/src/tapenade/preprocessing/_preprocessing.py b/src/tapenade/preprocessing/_preprocessing.py index c377ab1..296d47e 100644 --- a/src/tapenade/preprocessing/_preprocessing.py +++ b/src/tapenade/preprocessing/_preprocessing.py @@ -842,8 +842,8 @@ def segment_stardist( thresholds_dict=thresholds_dict, ) - from stardist import gputools_available + # from stardist simport gputools_available # if gputools_available(): # COMMENT FOR NOW # _purge_gpu_memory() diff --git a/src/tapenade/preprocessing/_thresholding.py b/src/tapenade/preprocessing/_thresholding.py index 6a725a6..ff5eee7 100644 --- a/src/tapenade/preprocessing/_thresholding.py +++ b/src/tapenade/preprocessing/_thresholding.py @@ -55,10 +55,13 @@ def _snp_threshold_binarization( snp_array = sigma2 * blurred snp_mask = snp_array > 0 - snp_array = np.log( - snp_array, - where=np.logical_and(nonzero_mask, snp_mask) if registered_image else snp_mask - ) + if registered_image: + snp_array = np.log( + snp_array, + where=np.logical_and(nonzero_mask, snp_mask) + ) + else: + snp_array = np.log(snp_array, where=snp_mask) threshold = threshold_otsu(snp_array[snp_mask]) * threshold_factor diff --git a/src/tapenade/preprocessing/segmentation_postprocessing.py b/src/tapenade/preprocessing/segmentation_postprocessing.py index 3b12032..bd6496b 100644 --- a/src/tapenade/preprocessing/segmentation_postprocessing.py +++ b/src/tapenade/preprocessing/segmentation_postprocessing.py @@ -2,6 +2,7 @@ from tqdm import tqdm import numpy as np from tqdm.contrib.concurrent import process_map +from skimage.measure import regionprops from tapenade.preprocessing._labels_masking import ( _remove_labels_outside_of_mask, @@ -92,7 +93,6 @@ def find_seg_errors(segmentation: np.ndarray, image: np.ndarray): return intensity_distribution - def tresh_distribution( intensity_distribution: np.ndarray, threshold: float, @@ -134,13 +134,14 @@ def remove_small_objects(segmentation: np.ndarray, min_size: int): The modified segmentation array. 
""" seg_filt = np.copy(segmentation) - unique_labels, label_counts = np.unique(segmentation, return_counts=True) - - smallest_labels = unique_labels[np.argsort(label_counts)] - smallest_volumes = np.sort(label_counts) - for label, volume in zip(smallest_labels, smallest_volumes): - if volume X) - - return X[mask] - - else: - - downX, upX = percentilesX - - if percentilesY is None: - downY, upY = percentilesX - else: - downY, upY = percentilesY - - percentile_downX = np.percentile(X, downX) - percentile_downY = np.percentile(Y, downY) - - percentile_upX = np.percentile(X, upX) - percentile_upY = np.percentile(Y, upY) - - maskX = np.logical_and(percentile_downX <= X, percentile_upX >= X) - maskY = np.logical_and(percentile_downY <= Y, percentile_upY >= Y) - - mask = np.logical_and(maskX, maskY) - - return X[mask], Y[mask] - - -def linear_fit( - x, - y, - robust: bool = False, - return_r2: bool = False, - robust_params_init: tuple = None, - robust_f_scale: float = None, -): - - if not robust: - res = linregress(x, y) - - if return_r2: - return res.intercept, res.slope, res.rvalue**2 - else: - return res.intercept, res.slope - - else: - - def f(params, x, y): - return params[0] + params[1] * x - y - - if robust_params_init is None: - robust_params_init = np.ones(2) - - res_robust = least_squares( - f, - robust_params_init, - args=(x, y), - loss="soft_l1", - f_scale=robust_f_scale, - ) - - if return_r2: - raise NotImplementedError - else: - return res_robust.x[0], res_robust.x[1]