From 0eb69b84c8db76863b0ebff85935e5d1480b8e54 Mon Sep 17 00:00:00 2001 From: Zuza Gawrysiak Date: Thu, 14 Dec 2023 22:32:12 +0100 Subject: [PATCH] Release v2.1.0 (#189) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: store user compounds data in the filesystem * test: add unit tests to LocalFileStorage * chore: add missing newlines, remove extra newlines * Add base plate class * Add plate reading from dir * add missing dependencies * echo file processing * add echo files parser + tests * update dependencies v2 * add missing newlines * add missing newlines :)) * Add docs * add suggestions * resolve styling issue * Refactor BMG files reading * Add tests * Add summary visualizations * Modify summary tuple method * Fix docstring * Modify test for plate * Optimize dataframe reading * Define PlateSummary type * 72 set up pre-commit (#75) * Add pre-commit * Add black workflow * Reformat files * 73 detect outliers control values (#77) * Fix colours in control values plot * Add outliers detection * Add test for outliers * Apply review fixes * Fix tests * Separate plate_array from df * Fix typing and docs * Change plot types * Change plates viz to plotly * 74 inhibitionactivation values (#79) * Fix colours in control values plot * Add outliers detection * Add test for outliers * Apply review fixes * Fix tests * Separate plate_array from df * compute inhibition/activation * merge activation/inhibition with echo files * check upon the values * fix combining echo bmg files * drop unnecessary columns * remove outliers from the resultant df * split compounds/controls, add z-score * add visualizations * add tests * improve docs and plot --------- Co-authored-by: Zuza Gawrysiak * closes #80 general restructuring (#81) * chore: merge src into dashboard * update: set basic pages structure * update: add stages placeholders to Primary Screening process * chore: for the time being move old code to LEGACY folder, remove outdated layouts * feat: add decorator for error handling * fix: correct import in a unit test * update: make data_folder a class attribute to facilitate global configuration * fix: correct LocalFileStorage test * chore: remove redundant style attr * Plot improvements (#85) * Filter low quality plates * Make plot grid size modifiable * Add z per plate plot * Change template to plotly_white * Apply review fixes * Add plotly template as const * Stage 1 and Stage 4 uploading (#86) * Add reading ioFile in bmg * Saving BML in Storage * Add echo reading * Add coments * Resolve tests problems * Pre-commit * Pre-commit changes * Solve problems * Bmg refactor * Bmg refactor * Echo parser refactor * Fix * Fix comment * 82 implement stage 2 (#90) * fix: add correct name to elements property, set stages container style to take full width * fix: subplots' titles not visibile correctly in heatmaps plot * feat: implement paginated heatmap plates viewer * add datatable for statistics preview, extend controls * add docstring * fix: sizing on smaller screens * Add stage 3 in dash (#89) * Add stage 3 in dash * Add z threshold slider * Refactor filtering lq plates * Change return annotation * 83 implement stage 5 in dashboard (#92) * add stage 5 table * add plots ad datatable * add interactive z-score * change act/inh/zscore plots * add RangeSlider * change data table styling * resolve imports/typos * Add stage 3 improvements (#95) * Change view of stage 1 and 4 (#93) * Change view of stage 1 and 4 * Resolve problems * Divide info into two parts * Change view * 
Change view * closes 96 update stage 2 layout (#97) * adjust stage 2 layout * hide plot controls * disable zooming and panning on heatmap plots * closes 98 add report stage revamp process controls (#99) * add placeholder for stage 6 * add functional controls component * remove old function for creating controls component * add icons to prev/next stage buttons * add border to controls component * remove commented code * closes 101 add correlation process (#104) * chore: rename primary_screening package to screening * update: rename primary-screening process page to screening * update: dummy element to be created per-process page * feat: add correlation process basis * feat: fill in first stage html * feat: add files parsing and validation placeholder * feat: implement remaining stages * Export screening results to csv file (#107) * add save report v1 * download echo_bmg_combined.csv * add custom csv name * clean callbacks.py * update delete_file * remove custom file name * remove imports * fix csv export (#112) * Report generation (#108) * Generate simple raport * Add storage for report data * Add plot in raport * Resolve problems * Clean code * Add secondary screening plots (#103) * Add secondary screening plots * Add concentration calculation * Fix docstring * Save report to download (#113) * 115 deanonymize compounds (#117) * Add eos * Update regex * Add eos to correlation stage * Fix test * Fix well naming issues * Remove unnecessary changes * Fix test * Remove rows without eos * closes 116 UI for hit validation process (#119) * Add hit validation process page stages, implement first stage * add parameters setting to first stage * adjust parameter change callback * implement second stage of hit validation process * remove step from concentration bounds inputs * reorder pages in nav bar * Update INH/ACT/Z-SCORE plots (#114) * add area charts * add hline * add ranges to act/inh * add filter criteria * add well/plate distinction * filter compounds to save * alter serialization * include bmg controls * fix test * fix test #2 * Pipeline check and fixes (#120) * change z-score calculation * handle different echo files * alter act/inh * fix test * fix tests & add exceptions button * filter low quality plates * change the origin * Add url to EOS datable (#129) * Add eos url * Fix styling * Save app settings to JSON file (#131) * Save json from primmary screening * clean * pre-commit * Rename methods and dicts * Add saving settings in correlation and hit validation * Style buttons * closes 126 implement third process (#133) * remove .venv from git tracking * implement basic parsing * implement file parsing and graph creation * implement csv downloading * remove unneeded files --------- Co-authored-by: njytwf * closes 138 control hit plot (#139) * fix initial process callbacks not setting stored uuid * implement stacking control * round top/bottom values * closes 140 configuration for hit determination (#142) * ui for controls * parametrize hit determination * connect state variables with the configuration callback * remove whitespace * Add plots to screening report (#143) * Add data projections (#132) * update combining process * add data projection process * add dropdowns * add eos links, save file * remove an outdated test * alter combine_assays_for_projection * add loading sign/dynamic table * add controls to the report * Revert "add controls to the report" This reverts commit 14328d2d8903f5cf8ffe658402f948cc79ef73b5. 
* remove apply button * add controls to the plot * revert legacy change * include both ACT & INH to projections * update umap package version * remove legacy, return uuid initially * Implement SMILES predictor (#141) * Train xgboost * Predict on ecbd data * Add smoter * Save preds to pq * Fix plotted eoses (#151) * Add predictions to dashboard (#146) * Add predictions to dashboard * Add SMILES plotting * Change predictions file to pq * Add missing requirements * Comment out umap (#153) * Comment out umap * Format notebook * Update default projection * merge inh/act into feature (#144) * merge inh/act into feature * remove functools.partial wrapper * 136 Save individual EOS report (#155) * Add statistics * Create report * Add concentration for 50% modulation * Uppercase letter * closes 147 app redesign 💅💅 (#154) * navbar and homepage re-design * make logo smaller, set correct logo src * redesign about page * reformat file * add smartart to about page * Correlation report (#157) * Create report correlation * Create report correlation * Fix Correlation stage * Add typing * Fix concentration_50 statistic (#161) * Create report correlation * Fix concentration_50 * closes 159 minor UI adjustments (#160) * add link * make main pages responsive * hit browser styling fixes * adjust stacking controls styling * replace buttons list with searchable dropdown for hit browser component selection * fix individual report generation * Add structural similarity (#156) * Add structural similarity * Small refactor * Fix file upload * Improve clustering * Move plot to plots * Update colors * Export eos plots to the final report (#168) * add report generation * add dependencies * alter saved plots * fix: handle case when last page is of size 0 (#169) * closes 165 update projections (#170) * add 3d projection plots * facilitate selected datapoints download for Visualization stage * facilitate selected datapoints download for Similarity stage * disable "Download selected" buttons when 3d plotting enabled * 147 app redesign v2 (#162) * unify plots view, add loaders * add page blockers * divide eos/echo loaders * remove unnecessary comments * change select file color * correct heatmap loader * alter upload text * add info icon * alter info icon position * fix info icon * Add ML experiment setup (#176) * Add ml experiment setup * Add feature selection, hp tuning and docs * Split projection stage (#174) * split stages * add projections processes descriptions * closes 166 minor styling (#175) * unify precision in screening process * fix unit display for concentration50 * add kaleido dependency * unify precision in data projection process * add thousands delimiter * restyle controls to include process name * replace screening statistics charts with bar plots * adjust styling for tooltip * allow to extend tooltip with custom styling * add bottom padding to page container * add tooltips to hit validation process * move tooltip annotation to components module * add tooltips to Screening process * reduce process title size * add tooltips to data projection process * extend docstring of the annotating function * add tooltips to correlation process * adjust concentration slider desc * update sliders tooltip descriptions for correlation * adjust coloring of mean value bar plots * add missing information in various descriptions * render error message when user uploads less than 3 screening files in data projection process * readd missing stage * restore controls chart to scatter plot * Update version (#179) * Update version * 
Remove dev * fix responsiveness (#181) * Add reproducibility (#177) * Reproducing Screening and Correlation * Reproducing Hit Validation * Remove checkbox changning * Change gitignore * Add alerts * Resolve * Reports refactor (#178) * Reproducing Screening and Correlation * Reproducing Hit Validation * Remove checkbox changning * Create Header in reports * Merge develop * restore changes from broken branch (#185) * After testing adjustments (#184) * add projections smiles info * add modulation_50/concentration_50 to report * add low quality plate csv * fix test * correct typos/pca_smiles_summary * remove pca_smiles_summary * Add activity filtering (#171) * Add activity filtering * Update layout * Add top and bottom thresholds * Add lines and separate threshold change * Add save button * Move button and rename cols * Update button * Refactor uploading files (#187) * Add text on upload * Individual text * Refactor * Add missing words * Resolve merge problem * Bump version (#188) --------- Co-authored-by: Bartosz Stachowiak Co-authored-by: Bartosz Stachowiak <72276326+Tremirre@users.noreply.github.com> Co-authored-by: azywot Co-authored-by: AndrzejKaj <101563276+AndrzejKaj@users.noreply.github.com> Co-authored-by: Agata <82370491+azywot@users.noreply.github.com> Co-authored-by: njytwf --- .gitignore | 2 +- dashboard/app.py | 2 +- dashboard/assets/style.css | 11 + dashboard/data/bmg_plate.py | 45 ++- dashboard/data/determination.py | 39 ++- dashboard/data/json_reader.py | 21 ++ dashboard/data/preprocess.py | 2 +- dashboard/data/validation.py | 4 + dashboard/pages/components.py | 31 +- dashboard/pages/correlation/callbacks.py | 211 ++++++++++++- .../pages/correlation/report/report.html | 66 +++- .../stages/s1_correlation_files_input.py | 44 ++- .../stages/s2_correlation_plots.py | 82 ++++- dashboard/pages/data_projection/page.py | 5 +- .../data_projection_screening/callbacks.py | 8 + .../stages/s1_projection_input.py | 2 +- .../pages/data_projection_smiles/callbacks.py | 43 +-- .../stages/s1_smiles_input.py | 4 +- .../stages/s2_smiles_display.py | 40 +++ dashboard/pages/hit_validation/callbacks.py | 125 ++++++-- .../pages/hit_validation/report/report.html | 64 +++- .../stages/s1_screening_data_input.py | 56 +++- dashboard/pages/screening/callbacks.py | 291 ++++++++++++++++-- dashboard/pages/screening/report/report.html | 64 +++- .../pages/screening/stages/s1_bmg_input.py | 70 ++++- .../screening/stages/s2_outliers_purging.py | 14 +- .../pages/screening/stages/s4_echo_input.py | 4 +- .../pages/screening/stages/s5_summary.py | 2 + dashboard/pages/screening/stages/s6_report.py | 28 +- dashboard/visualization/plots.py | 49 ++- dashboard/visualization/text_tables.py | 3 +- tests/bmg_plate_test.py | 6 +- 32 files changed, 1233 insertions(+), 205 deletions(-) create mode 100644 dashboard/data/json_reader.py diff --git a/.gitignore b/.gitignore index 8bb4da9..7d3cfd2 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,7 @@ tmp/ **/out/ # Data -data +./data data/raw/* !data/raw/.gitkeep notebooks/data diff --git a/dashboard/app.py b/dashboard/app.py index 8e80637..d17d557 100644 --- a/dashboard/app.py +++ b/dashboard/app.py @@ -10,7 +10,7 @@ FONT_AWESOME_CDN = ( "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css" ) -VERSION = "v2.0.0" +VERSION = "v2.1.0" fs_dir = os.environ.get("DRUG_SCREENING_DATA_DIR", ".drug-screening-data") diff --git a/dashboard/assets/style.css b/dashboard/assets/style.css index fe35564..019a9a7 100644 --- a/dashboard/assets/style.css +++ 
b/dashboard/assets/style.css @@ -89,6 +89,12 @@ column-gap: 2rem; } +.grid-1-1-projections { + display: grid; + grid-template-columns: repeat(2, 1fr); + justify-content: center; + width: 50%; + } .upload-box { width: 100%; @@ -277,4 +283,9 @@ main.grid-1-1-1-1 { grid-template-columns: repeat(1, 1fr); } + + main.grid-1-1-projections { + grid-template-columns: repeat(1, 1fr); + width: 100%; + } } diff --git a/dashboard/data/bmg_plate.py b/dashboard/data/bmg_plate.py index e0e0321..5c99f89 100644 --- a/dashboard/data/bmg_plate.py +++ b/dashboard/data/bmg_plate.py @@ -1,9 +1,13 @@ import io import numpy as np import pandas as pd +import logging + from collections import namedtuple from enum import Enum, auto +logger = logging.getLogger(__name__) + PlateSummary = namedtuple( "PlateSummary", @@ -159,30 +163,46 @@ def parse_bmg_file(filename: str, filecontent: io.StringIO) -> np.ndarray: df = pd.read_csv(filecontent, header=None) plate = df.to_numpy() break - well, value = line.split() + if not line.strip(): + continue + cells = line.split() + if len(cells) != 2: + raise ValueError( + f"Wrong format of file {filename} - line {i} has {len(cells)} cells instead of 2" + ) + well, value = cells i, j = well_to_ids(well) plate[i, j] = value return barcode, plate -def parse_bmg_files(files: tuple[str, io.StringIO]) -> tuple[pd.DataFrame, np.ndarray]: +def parse_bmg_files( + files: tuple[str, io.StringIO] +) -> tuple[pd.DataFrame, np.ndarray, dict[str, str]]: """ Parse file from iostring with BMG files to DataFrame :param files: tuple containing names and content of files - :return: DataFrame with BMG files (=plates) as rows + :param failed_files: dictionary with failed files + :return: DataFrame with BMG files (=plates) as rows, + plates values as np.array and failed files with errors """ plate_summaries = [] plate_values = [] + failed_files = {} for filename, filecontent in files: - barcode, plate_array = parse_bmg_file(filename, filecontent) - plate = Plate(barcode, plate_array) - z_wo, outliers_mask = calculate_z_outliers(plate) - plate_summaries.append(get_summary_tuple(plate, z_wo)) - plate_values.append([plate.plate_array, outliers_mask]) + try: + barcode, plate_array = parse_bmg_file(filename, filecontent) + plate = Plate(barcode, plate_array) + z_wo, outliers_mask = calculate_z_outliers(plate) + plate_summaries.append(get_summary_tuple(plate, z_wo)) + plate_values.append([plate.plate_array, outliers_mask]) + except Exception as e: + logger.warning(f"Error while parsing file {filename}: {e}") + failed_files[filename] = str(e) df = pd.DataFrame(plate_summaries) plate_values = np.asarray(plate_values) - return df, plate_values + return df, plate_values, failed_files def calculate_activation_inhibition_zscore( @@ -261,17 +281,18 @@ def get_activation_inhibition_zscore_dict( def filter_low_quality_plates( df: pd.DataFrame, plate_array: np.ndarray, threshold: float = 0.5 -) -> tuple[pd.DataFrame, np.ndarray]: +) -> tuple[pd.DataFrame, pd.DataFrame, np.ndarray]: """ Remove plates with Z factor lower than threshold :param df: DataFrame with control values :param plate_array: array with plate values :param threshold: Z factor threshold value - :return: high quality plates + :return: high quality plates, low quality plates, high quality plate array """ quality_mask = df.z_factor > threshold quality_df = df[quality_mask] + low_quality_df = df[~quality_mask][["barcode", "z_factor"]] low_quality_ids = np.where(quality_mask == False) quality_plates = np.delete(plate_array, low_quality_ids, axis=0) - return 
quality_df, quality_plates + return quality_df, low_quality_df, quality_plates diff --git a/dashboard/data/determination.py b/dashboard/data/determination.py index cbf369e..6fbc188 100644 --- a/dashboard/data/determination.py +++ b/dashboard/data/determination.py @@ -1,6 +1,5 @@ -import pandas as pd import numpy as np - +import pandas as pd from scipy.optimize import curve_fit @@ -33,7 +32,29 @@ def find_argument_four_param_logistic( :param slope: the steepness of the curve :return: argument of the function for given y """ - return ic50 * ((lower_limit - upper_limit) / (y - upper_limit) - 1) ** (1 / slope) + x = ic50 * ((lower_limit - upper_limit) / (y - upper_limit) - 1) ** (1 / slope) + if type(x) == complex: + x = np.nan + return x + + +def calculate_modulation_ic50_and_concentration_50(row: pd.Series) -> pd.Series: + """ + Calculates modulation_ic50 and concentration_50 (concentration for modulation = 50) for given row. + + :param row: row of the dataframe + :return: row with calculated modulation_ic50 and concentration_50 + """ + modulation_ic50 = four_param_logistic( + row["ic50"], row["BOTTOM"], row["TOP"], row["ic50"], row["slope"] + ) + concentration_50 = find_argument_four_param_logistic( + 50, row["BOTTOM"], row["TOP"], row["ic50"], row["slope"] + ) + + return pd.Series( + {"modulation_ic50": modulation_ic50, "concentration_50": concentration_50} + ) def curve_fit_for_activation(screen_df: pd.DataFrame) -> pd.DataFrame: @@ -140,7 +161,17 @@ def process_activation_df( & (activation_df.ic50 < concentration_upper_bound) & (activation_df.ic50 > concentration_lower_bound) ) - return activation_df + + cols = activation_df.columns.to_list() + modulation_concentration = ["modulation_ic50", "concentration_50"] + pos = cols.index("slope") + column_order = cols[:pos] + modulation_concentration + cols[pos:] + + activation_df[modulation_concentration] = activation_df.apply( + lambda row: calculate_modulation_ic50_and_concentration_50(row), axis=1 + ) + + return activation_df[column_order] def perform_hit_determination( diff --git a/dashboard/data/json_reader.py b/dashboard/data/json_reader.py new file mode 100644 index 0000000..8bf6464 --- /dev/null +++ b/dashboard/data/json_reader.py @@ -0,0 +1,21 @@ +import base64 +import io +import json + + +def load_data_from_json(content: str | None, name: str | None) -> dict | None: + if content is None: + return None + file = None + + _, extension = name.split(".") + if extension == "json": + _, content_string = content.split(",") + decoded = base64.b64decode(content_string) + file = io.StringIO(decoded.decode("utf-8")) + + loaded_data = None + if file: + loaded_data = json.load(file) + + return loaded_data diff --git a/dashboard/data/preprocess.py b/dashboard/data/preprocess.py index 4eca156..6884b26 100644 --- a/dashboard/data/preprocess.py +++ b/dashboard/data/preprocess.py @@ -162,5 +162,5 @@ def calculate_concentration( :param summary_assay_volume: to divide by :return: dataframe """ - df["Concentration"] = df["Actual Volume_y"] * concetration / summary_assay_volume + df["Concentration"] = df["Actual Volume_1"] * concetration / summary_assay_volume return df diff --git a/dashboard/data/validation.py b/dashboard/data/validation.py index b70795c..242db42 100644 --- a/dashboard/data/validation.py +++ b/dashboard/data/validation.py @@ -10,6 +10,10 @@ def validate_correlation_dataframe(corr_df: pd.DataFrame) -> None: """ if not corr_df.columns.is_unique: raise ValueError("Column names must be unique.") + if not ("% ACTIVATION" in corr_df.columns or "% 
INHIBITION" in corr_df.columns): + raise ValueError("Column with ACTIVATION/INHIBITION not found") + if not "EOS" in corr_df.columns: + raise ValueError("Column with EOS not found") ... # TODO: add more validation diff --git a/dashboard/pages/components.py b/dashboard/pages/components.py index 4a339af..3d5531a 100644 --- a/dashboard/pages/components.py +++ b/dashboard/pages/components.py @@ -26,6 +26,9 @@ dcc.Store(id="report-data-hit-validation-input", storage_type="local"), dcc.Store(id="report-data-hit-validation-hit-browser", storage_type="local"), dcc.Store(id="activation-inhibition-screening-options", storage_type="local"), + dcc.Store(id="loaded-setings-screening", storage_type="local"), + dcc.Store(id="loaded-setings-correlation", storage_type="local"), + dcc.Store(id="loaded-setings-hit-validation", storage_type="local"), ], ) @@ -213,7 +216,7 @@ def make_file_list_component( className="col", children=html.Ul( children=[ - html.Li(name.split(".")[0]) + html.Li(name) for name in successfull_filenames[i::num_cols] ] ), @@ -239,7 +242,7 @@ def make_file_list_component( className="col", children=html.Ul( children=[ - html.Li(name.split(".")[0]) + html.Li(name) for name in failed_filenames[i::num_cols] ] ), @@ -298,3 +301,27 @@ def annotate_with_tooltip( element.children = [element.children] element.children.insert(0, tooltip) return element + + +def make_new_upload_view( + text1: str, + text2: str, +) -> list[html.Div]: + """ + Prepare children for drag and drop zone. + + :param text1: text for first element, it should be response on uploaded file. + :param text2: text to inform that new file can still be uploaded + :return: list of html.Div + """ + return [ + html.Div(text1), + html.Div( + [ + "Drag and Drop or ", + html.A("Select", className="select-file"), + " ", + text2, + ], + ), + ] diff --git a/dashboard/pages/correlation/callbacks.py b/dashboard/pages/correlation/callbacks.py index 556eb1f..044e3e9 100644 --- a/dashboard/pages/correlation/callbacks.py +++ b/dashboard/pages/correlation/callbacks.py @@ -4,14 +4,15 @@ import json import uuid from datetime import datetime +from typing import Tuple import pandas as pd import pyarrow as pa -from dash import Input, Output, State, callback, html, no_update -from plotly import express as px +from dash import Input, Output, State, callback, dcc, html, no_update from plotly import graph_objects as go from dashboard.data import validation +from dashboard.data.json_reader import load_data_from_json from dashboard.data.preprocess import calculate_concentration from dashboard.storage import FileStorage from dashboard.visualization.plots import ( @@ -21,6 +22,7 @@ from dashboard.pages.correlation.report.generate_jinja_report import ( generate_jinja_report, ) +from dashboard.pages.components import make_new_upload_view # === STAGE 1 === @@ -40,7 +42,7 @@ def on_file_upload( content: str | None, stored_uuid: str, file_storage: FileStorage, store_suffix: str -) -> tuple[html.I, str]: +) -> Tuple[html.I, str]: """ Callback for file upload. It saves the file to the storage and returns an icon indicating the status of the upload. 
@@ -54,7 +56,7 @@ def on_file_upload( :return: session uuid """ if content is None: - return no_update, no_update, no_update + return no_update if stored_uuid is None: stored_uuid = str(uuid.uuid4()) @@ -64,13 +66,26 @@ def on_file_upload( corr_df = pd.read_csv(io.StringIO(decoded)) validation.validate_correlation_dataframe(corr_df) except Exception as e: - return ICON_ERROR, stored_uuid + return ( + ICON_ERROR, + make_new_upload_view( + f'File uploading error. File should contain "EOS" and also "% INHIBITION" or "% ACTIVATION" keys.', + "new Screening file (.csv)", + ), + no_update, + stored_uuid, + ) saved_name = f"{stored_uuid}_{store_suffix}.pq" file_storage.save_file(saved_name, corr_df.to_parquet()) - return ICON_OK, no_update, stored_uuid + return ( + ICON_OK, + make_new_upload_view("File uploaded successfully", "new Screening file (.csv)"), + no_update, + stored_uuid, + ) def on_both_files_uploaded( @@ -111,6 +126,35 @@ def on_both_files_uploaded( return ICON_OK, False +def upload_settings_data(content: str | None, name: str | None) -> dict: + """ + Callback for file upload. It saves the in local storage for other components. + + :param content: base64 encoded file content + :param name: filename + :return: dict with loaded data + """ + if not content: + return no_update + loaded_data = load_data_from_json(content, name) + color = "success" + text = "Settings uploaded successfully" + settings_keys = ["concentration_value", "volume_value"] + if loaded_data == None or not set(settings_keys).issubset(loaded_data.keys()): + color = "danger" + text = ( + f"Invalid settings uploaded: the file should contain {settings_keys} keys." + ) + return ( + loaded_data, + True, + html.Span(text), + color, + make_new_upload_view(text, "new Settings file (.json)"), + no_update, + ) + + # === STAGE 2 === @@ -120,7 +164,7 @@ def on_visualization_stage_entry( volume_value: int, stored_uuid: str, file_storage: FileStorage, -) -> tuple[go.Figure, go.Figure]: +) -> Tuple[go.Figure, go.Figure]: """ Callback for visualization stage entry. It loads the data from the storage and returns the figures. 
@@ -142,15 +186,19 @@ def on_visualization_stage_entry( df_secondary = pd.read_parquet( pa.BufferReader(file_storage.read_file(saved_name_2)) ) - df_merged = pd.merge(df_primary, df_secondary, on="EOS", how="inner") + df_merged = pd.merge( + df_primary, df_secondary, on="EOS", how="inner", suffixes=["_0", "_1"] + ) + df_merged.drop(["Unnamed: 0_0", "Unnamed: 0_1"], axis=1, inplace=True) df = calculate_concentration(df_merged, concentration_value, volume_value) + file_storage.save_file(f"{stored_uuid}_correlation_df.pq", df.to_parquet()) - feature = "% ACTIVATION" if "% ACTIVATION_x" in df.columns else "% INHIBITION" - concentration_fig = concentration_plot(df, feature[2:]) + feature = "% ACTIVATION" if "% ACTIVATION_0" in df.columns else "% INHIBITION" + concentration_fig = concentration_plot(df, feature[2:]) feature_fig = concentration_confirmatory_plot( - df[f"{feature}_x"], - df[f"{feature}_y"], + df[f"{feature}_0"], + df[f"{feature}_1"], df["Concentration"], f"{feature[2:]}", ) @@ -163,10 +211,98 @@ def on_visualization_stage_entry( full_html=False, include_plotlyjs="cdn" ), } - return feature_fig, concentration_fig, report_data_correlation_plots, False +def on_threshold_change( + threshold_1: float, + threshold_2: float, + stored_uuid: str, + file_storage: FileStorage, +): + """ + Callback for threshold update, updates the plot + + :param threshold_1: first threshold + :param threshold_2: second threshold + :param stored_uuid: session uuid + :param file_storage: file storage + :return: figures + """ + saved_name = f"{stored_uuid}_correlation_df.pq" + df = pd.read_parquet(pa.BufferReader(file_storage.read_file(saved_name))) + feature = "% ACTIVATION" if "% ACTIVATION_0" in df.columns else "% INHIBITION" + + new_fig = concentration_plot( + df, + feature[2:], + threshold_1, + threshold_2, + ) + return new_fig + + +def on_save_filtering_clicked( + n_clicks: int, + threshold_1: float, + threshold_2: float, + stored_uuid: str, + file_storage: FileStorage, +) -> None: + """ + Callback for the save filtered button + + :param n_clicks: number of clicks + :param threshold_1: first threshold + :param threshold_2: second threshold + :param stored_uuid: uuid of the stored data + :param file_storage: storage object + :return: None + """ + saved_name = f"{stored_uuid}_correlation_df.pq" + df = pd.read_parquet(pa.BufferReader(file_storage.read_file(saved_name))) + feature = "% ACTIVATION" if "% ACTIVATION_0" in df.columns else "% INHIBITION" + + filename = f"correlation_threshold_{datetime.now().strftime('%Y-%m-%d')}.csv" + df["> threshold_one"] = False + df.loc[df[f"{feature}_0"] > threshold_1, "> threshold_one"] = True + df["> threshold_two"] = False + df.loc[df[f"{feature}_0"] > threshold_2, "> threshold_two"] = True + + file_storage.save_file(f"{stored_uuid}_filtered_correlation_df.pq", df.to_parquet()) + + return dcc.send_data_frame(df.to_csv, filename) + + +def on_visualization_stage_entry_load_settings( + current_stage: int, + concentration: float, + volume: float, + saved_data: dict, +) -> Tuple[float, float]: + """ + Callback for visualization stage entry. 
+ Loads the data from local storage and update sliders value + + :param current_stage: current stage index of the process + :param concentration: concentration slider value + :param volume: volume slider value + :return: value for concentration slider + :return: value for volume slider + """ + + if current_stage != 1: + return no_update + + concentration_value = concentration + volume_value = volume + if saved_data != None: + concentration_value = saved_data["concentration_value"] + volume_value = saved_data["volume_value"] + + return concentration_value, volume_value + + # === STAGE 3 === @@ -177,7 +313,11 @@ def on_json_generate_button_click( filename = ( f"correlation_analysis_settings_{datetime.now().strftime('%Y-%m-%d')}.json" ) - json_object = json.dumps(correlation_plots_report, indent=4) + data_to_save = { + "concentration_value": correlation_plots_report["concentration_value"], + "volume_value": correlation_plots_report["volume_value"], + } + json_object = json.dumps(data_to_save, indent=4) return dict(content=json_object, filename=filename) @@ -197,6 +337,7 @@ def on_save_report_button_click(n_clicks: int, report_data: dict) -> dict: def register_callbacks(elements, file_storage: FileStorage): callback( Output("file-1-status", "children"), + Output("upload-file-1", "children"), Output("dummy-upload-file-1", "children"), Output("user-uuid", "data", allow_duplicate=True), Input("upload-file-1", "contents"), @@ -210,6 +351,7 @@ def register_callbacks(elements, file_storage: FileStorage): callback( Output("file-2-status", "children"), + Output("upload-file-2", "children"), Output("dummy-upload-file-2", "children"), Output("user-uuid", "data", allow_duplicate=True), Input("upload-file-2", "contents"), @@ -230,7 +372,19 @@ def register_callbacks(elements, file_storage: FileStorage): )(functools.partial(on_both_files_uploaded, file_storage=file_storage)) callback( - Output("inhibition-graph", "figure"), + Output("loaded-setings-correlation", "data"), + Output("alert-upload-settings-correlation", "is_open"), + Output("alert-upload-settings-correlation-text", "children"), + Output("alert-upload-settings-correlation", "color"), + Output("upload-settings-correlation", "children"), + Output("dummy-upload-settings-correlation", "children"), + Input("upload-settings-correlation", "contents"), + Input("upload-settings-correlation", "filename"), + prevent_initial_call=True, + )(functools.partial(upload_settings_data)) + + callback( + Output("feature-graph", "figure"), Output("concentration-graph", "figure"), Output("report-data-correlation-plots", "data"), Output({"type": elements["BLOCKER"], "index": 1}, "data"), @@ -239,6 +393,33 @@ def register_callbacks(elements, file_storage: FileStorage): Input("volume-slider", "value"), State("user-uuid", "data"), )(functools.partial(on_visualization_stage_entry, file_storage=file_storage)) + + callback( + Output("concentration-graph", "figure", allow_duplicate=True), + Input("activity-threshold-bottom-input", "value"), + Input("activity-threshold-top-input", "value"), + State("user-uuid", "data"), + prevent_initial_call=True, + )(functools.partial(on_threshold_change, file_storage=file_storage)) + + callback( + Output("download-filtered-csv", "data"), + Input("save-filtered-button", "n_clicks"), + State("activity-threshold-bottom-input", "value"), + State("activity-threshold-top-input", "value"), + State("user-uuid", "data"), + prevent_initial_call=True, + )(functools.partial(on_save_filtering_clicked, file_storage=file_storage)) + + callback( + 
Output("concentration-slider", "value"), + Output("volume-slider", "value"), + Input(elements["STAGES_STORE"], "data"), + State("concentration-slider", "value"), + State("volume-slider", "value"), + State("loaded-setings-correlation", "data"), + )(functools.partial(on_visualization_stage_entry_load_settings)) + callback( Output("download-json-settings-correlation", "data"), Input("generate-json-button", "n_clicks"), diff --git a/dashboard/pages/correlation/report/report.html b/dashboard/pages/correlation/report/report.html index ccbca1b..f853ee5 100644 --- a/dashboard/pages/correlation/report/report.html +++ b/dashboard/pages/correlation/report/report.html @@ -26,25 +26,75 @@ } h3 { - text-align: left; + text-align: center; font-weight: bold; font-size: 24px; } + + header { + background-color: black; + color: white; + text-align: center; + display: flex; + justify-content: space-between; + align-items: center; + padding: 3px; + } + + .logopart1 { + margin-left: 66px; + font-weight: bold; + font-size: 52px; + } + + .logopart2 { + font-size: 25px; + font-weight: 200; + font-style: italic; + } + + .title { + font-size: 65px; + font-style: italic; + margin-left: 205px; + } + + .data { + font-size: 20px; + font-style: italic; + position: absolute; + top: 80px; + right: 30px; + } + + .logo { + text-decoration: underline; + text-decoration-thickness: 0.1em; + text-decoration-color: blue; + } - Correlation analysis + Correlation Analysis -

[report.html body hunk (markup not recoverable): the old header block rendering "Report {{current_day}}" and "{{current_time}}" is removed and replaced by a new page header with the logo, a "Correlation Analysis" title and a "Generated {{current_day}}" date line, styled by the header, .logopart1, .logopart2, .title and .data rules added above; the hunk's trailing context is the "Concentration" section heading.]
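# Hedged usage sketch, not part of the patch: a minimal round trip for the new
# correlation settings JSON. It builds a string in the shape Dash's dcc.Upload
# passes to callbacks ("data:<mime>;base64,<payload>") and feeds it to the
# load_data_from_json helper added in dashboard/data/json_reader.py. The file
# name and the numeric values are illustrative assumptions.
import base64
import json

from dashboard.data.json_reader import load_data_from_json

# Shape of the settings exported by on_json_generate_button_click.
settings = {"concentration_value": 10, "volume_value": 5}
payload = base64.b64encode(json.dumps(settings).encode("utf-8")).decode("utf-8")
contents = f"data:application/json;base64,{payload}"

loaded = load_data_from_json(contents, "correlation_settings.json")

# upload_settings_data accepts the file only when both expected keys are present.
required_keys = {"concentration_value", "volume_value"}
assert loaded is not None and required_keys.issubset(loaded.keys())
print(loaded)  # {'concentration_value': 10, 'volume_value': 5}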
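# Hedged sketch, not part of the patch: the flagging logic used by the new
# on_save_filtering_clicked callback, shown on a toy frame. The column names
# ("% ACTIVATION_0", "> threshold_one", "> threshold_two") follow the patch;
# the EOS ids and activation values are made up.
import pandas as pd

df = pd.DataFrame(
    {"EOS": ["EOS-1", "EOS-2", "EOS-3"], "% ACTIVATION_0": [12.0, 55.0, 140.0]}
)
threshold_1, threshold_2 = 30.0, 100.0

df["> threshold_one"] = False
df.loc[df["% ACTIVATION_0"] > threshold_1, "> threshold_one"] = True
df["> threshold_two"] = False
df.loc[df["% ACTIVATION_0"] > threshold_2, "> threshold_two"] = True

# EOS-2 crosses only the first threshold; EOS-3 crosses both.
print(df)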
diff --git a/dashboard/pages/correlation/stages/s1_correlation_files_input.py b/dashboard/pages/correlation/stages/s1_correlation_files_input.py index 4e70fd5..6f4f7ac 100644 --- a/dashboard/pages/correlation/stages/s1_correlation_files_input.py +++ b/dashboard/pages/correlation/stages/s1_correlation_files_input.py @@ -2,6 +2,8 @@ from dashboard.pages.components import annotate_with_tooltip +import dash_bootstrap_components as dbc + DESC = [ html.Span( """ @@ -36,7 +38,7 @@ [ "Drag and Drop or ", html.A("Select", className="select-file"), - " Screening file #1", + " Screening file #1 (.csv)", ] ), multiple=False, @@ -61,11 +63,11 @@ [ "Drag and Drop or ", html.A("Select", className="select-file"), - " Screening file #2", + " Screening file #2 (.csv)", ] ), multiple=False, - className="text-center upload-box", + className="text-center upload-box p-0 m-0", ), html.Div(id="dummy-upload-file-2"), ], @@ -73,6 +75,42 @@ ), ], ), + html.Div( + className="flex-grow-1", + children=[ + html.H5("Settings File"), + dcc.Loading( + children=[ + dcc.Upload( + id="upload-settings-correlation", + accept=".json", + children=html.Div( + [ + "Drag and Drop or ", + html.A("Select", className="select-file"), + " Settings for correlation analysis (.json)", + ] + ), + multiple=False, + className="text-center upload-box", + ), + html.Div( + id="dummy-upload-settings-correlation", + className="p-1", + ), + ], + type="circle", + ), + dbc.Alert( + html.Div(id="alert-upload-settings-correlation-text"), + id="alert-upload-settings-correlation", + dismissable=True, + is_open=False, + duration=4000, + className="m-1", + ), + ], + ), ], ) diff --git a/dashboard/pages/correlation/stages/s2_correlation_plots.py b/dashboard/pages/correlation/stages/s2_correlation_plots.py index 20f0da4..73742c6 100644 --- a/dashboard/pages/correlation/stages/s2_correlation_plots.py +++ b/dashboard/pages/correlation/stages/s2_correlation_plots.py @@ -1,6 +1,8 @@ from dash import html, dcc from dashboard.pages.components import annotate_with_tooltip +from dashboard.visualization.text_tables import make_download_button_text + CONCENTRATION_SLIDER_DESC = """ Choose the concentration to be used for the final compound concentration calculation that will be @@ -20,8 +22,14 @@ className="col", children=[ dcc.Loading( - id="loading-inhibition-graph", - children=[dcc.Graph(id="inhibition-graph")], + id="loading-feature-graph", + children=[ + dcc.Graph( + id="feature-graph", + className="six columns", + style={"width": "100%"}, + ) + ], type="circle", ) ], @@ -31,9 +39,75 @@ children=[ dcc.Loading( id="loading-concentration-graph", - children=[dcc.Graph(id="concentration-graph")], + children=[ + dcc.Graph( + id="concentration-graph", + className="six columns", + style={"width": "100%"}, + ) + ], type="circle", - ) + ), + html.Div( + className="row", + children=[ + html.Div( + className="col", + children=[ + html.Span( + children=[ + html.Label( + children="Set the first threshold", + className="form-label", + ), + dcc.Input( + id="activity-threshold-bottom-input", + type="number", + value=0, + min=-50, + className="form-control", + ), + ], + className="flex-grow-1", + ), + ], + ), + html.Div( + className="col", + children=[ + html.Span( + children=[ + html.Label( + children="Set the second threshold", + className="form-label", + ), + dcc.Input( + id="activity-threshold-top-input", + type="number", + value=100, + min=0, + className="form-control", + ), + ], + className="flex-grow-1", + ), + ], + ), + html.Div( + className="mt-3 mb-1 d-flex 
justify-content-center", + children=[ + html.Button( + make_download_button_text( + "Save filtered dataframe" + ), + className="btn btn-primary btn-lg btn-block btn-report", + id="save-filtered-button", + ), + dcc.Download(id="download-filtered-csv"), + ], + ), + ], + ), ], ), ], diff --git a/dashboard/pages/data_projection/page.py b/dashboard/pages/data_projection/page.py index d6713ed..70cb752 100644 --- a/dashboard/pages/data_projection/page.py +++ b/dashboard/pages/data_projection/page.py @@ -31,13 +31,12 @@ }, ] -children = [html.Div(), None, None, html.Div()] -children[1:3] = [make_card_component(**card_data) for card_data in CARDS_DATA] +children = [make_card_component(**card_data) for card_data in CARDS_DATA] pb = PageBuilder(name=NAME) pb.extend_layout( layout=html.Main( - className="h-100 flex-grow-1 grid-1-1-1-1 gap-3 mx-auto my-5 container-xxl", + className="h-100 flex-grow-1 grid-1-1-projections gap-3 mx-auto my-5 container-xxl", children=children, ) ) diff --git a/dashboard/pages/data_projection_screening/callbacks.py b/dashboard/pages/data_projection_screening/callbacks.py index 3ede18f..7da98cf 100644 --- a/dashboard/pages/data_projection_screening/callbacks.py +++ b/dashboard/pages/data_projection_screening/callbacks.py @@ -19,6 +19,7 @@ from dashboard.storage import FileStorage from dashboard.visualization.plots import make_projection_plot, plot_projection_2d from dashboard.visualization.text_tables import pca_summary, table_from_df +from dashboard.pages.components import make_new_upload_view PROJECTION_SETUP = [ (PCA(n_components=3), "PCA"), @@ -72,6 +73,9 @@ def on_projection_files_upload( ], className="alert alert-danger", ), + make_new_upload_view( + "You need to upload at least 3 files.", "new Screening files (.csv)" + ), stored_uuid, no_update, True, @@ -99,6 +103,9 @@ def on_projection_files_upload( make_file_list_component(filenames, [], 1), ], ), + make_new_upload_view( + "Files uploaded successfully", "new Screening files (.csv)" + ), stored_uuid, no_update, False, @@ -322,6 +329,7 @@ def on_save_projections_click( def register_callbacks(elements, file_storage: FileStorage): callback( Output("projections-file-message", "children"), + Output("upload-projection-data", "children"), Output("user-uuid", "data", allow_duplicate=True), Output("dummy-upload-projection-data", "children"), Output({"type": elements["BLOCKER"], "index": 0}, "data"), diff --git a/dashboard/pages/data_projection_screening/stages/s1_projection_input.py b/dashboard/pages/data_projection_screening/stages/s1_projection_input.py index f18c592..943ffe1 100644 --- a/dashboard/pages/data_projection_screening/stages/s1_projection_input.py +++ b/dashboard/pages/data_projection_screening/stages/s1_projection_input.py @@ -26,7 +26,7 @@ [ "Drag and Drop or ", html.A("Select", className="select-file"), - " Screening files", + " Screening files (.csv)", ] ), multiple=True, diff --git a/dashboard/pages/data_projection_smiles/callbacks.py b/dashboard/pages/data_projection_smiles/callbacks.py index b5d3822..54e679a 100644 --- a/dashboard/pages/data_projection_smiles/callbacks.py +++ b/dashboard/pages/data_projection_smiles/callbacks.py @@ -9,41 +9,15 @@ import plotly.graph_objects as go import pyarrow as pa from dash import Input, Output, State, callback, dcc, html, no_update -from sklearn.decomposition import PCA -from umap import UMAP -from dashboard.data.controls import controls_index_annotator, generate_controls -from dashboard.data.preprocess import MergedAssaysPreprocessor from 
dashboard.data.structural_similarity import prepare_cluster_viz from dashboard.data.utils import eos_to_ecbd_link from dashboard.pages.components import make_file_list_component from dashboard.storage import FileStorage -from dashboard.visualization.plots import ( - make_projection_plot, - plot_clustered_smiles, - plot_projection_2d, -) -from dashboard.visualization.text_tables import pca_summary, table_from_df - -PROJECTION_SETUP = [ - (PCA(n_components=3), "PCA"), - ( - UMAP( - n_components=2, - n_neighbors=10, - min_dist=0.1, - ), - "UMAP", - ), - ( - UMAP( - n_components=3, - n_neighbors=10, - min_dist=0.1, - ), - "UMAP3D", - ), -] +from dashboard.visualization.plots import plot_clustered_smiles + +from dashboard.visualization.text_tables import table_from_df +from dashboard.pages.components import make_new_upload_view def on_3d_checkbox_change(plot_3d: List[str]) -> bool: @@ -79,7 +53,11 @@ def on_hit_validation_upload( activity_decoded = base64.b64decode(contents.split(",")[1]).decode("utf-8") activity_df = pd.read_csv(io.StringIO(activity_decoded), dtype="str") file_storage.save_file(f"{stored_uuid}_activity_df.pq", activity_df.to_parquet()) - return stored_uuid, None # dummy activity upload return + return ( + make_new_upload_view("File uploaded", "new Hit Validation file (.csv)"), + stored_uuid, + None, + ) # dummy activity upload return def on_smiles_files_upload( @@ -129,6 +107,7 @@ def on_smiles_files_upload( make_file_list_component([filename, smiles_filename], [], 1), ], ), + make_new_upload_view("File uploaded", "new SMILES file (.csv)"), False, # next stage button disabled status stored_uuid, None, # dummy smiles upload return @@ -220,6 +199,7 @@ def on_smiles_download_selection_button_click( def register_callbacks(elements, file_storage: FileStorage): callback( + Output("upload-activity-data", "children"), Output("user-uuid", "data", allow_duplicate=True), Output("dummy-upload-activity-data", "children"), Input("upload-activity-data", "contents"), @@ -228,6 +208,7 @@ def register_callbacks(elements, file_storage: FileStorage): )(functools.partial(on_hit_validation_upload, file_storage=file_storage)) callback( Output("smiles-file-message", "children"), + Output("upload-smiles-data", "children"), Output({"type": elements["BLOCKER"], "index": 0}, "data"), Output("user-uuid", "data", allow_duplicate=True), Output("dummy-upload-smiles-data", "children"), diff --git a/dashboard/pages/data_projection_smiles/stages/s1_smiles_input.py b/dashboard/pages/data_projection_smiles/stages/s1_smiles_input.py index cb513fd..755d453 100644 --- a/dashboard/pages/data_projection_smiles/stages/s1_smiles_input.py +++ b/dashboard/pages/data_projection_smiles/stages/s1_smiles_input.py @@ -8,7 +8,7 @@ "upload_text": [ "Drag and Drop or ", html.A("Select", className="select-file"), - " Hit Validation file", + " Hit Validation file (.csv)", ], "dummy_id": "dummy-upload-activity-data", }, @@ -19,7 +19,7 @@ "upload_text": [ "Drag and Drop or ", html.A("Select", className="select-file"), - " SMILES file", + " SMILES file (.csv)", ], "dummy_id": "dummy-upload-smiles-data", }, diff --git a/dashboard/pages/data_projection_smiles/stages/s2_smiles_display.py b/dashboard/pages/data_projection_smiles/stages/s2_smiles_display.py index 3e37b22..41a10ca 100644 --- a/dashboard/pages/data_projection_smiles/stages/s2_smiles_display.py +++ b/dashboard/pages/data_projection_smiles/stages/s2_smiles_display.py @@ -2,6 +2,19 @@ from dashboard.pages.components import annotate_with_tooltip +PROJECTION_SMILES_INFO_TEXT = 
html.Span( + [ + """To calculate the projections, Extended-Connectivity Fingerprints (ECFP) were used. +Fingerprints were calculated using RDKit method """, + html.A( + "GetMorganFingerprintAsBitVect", + href="https://www.herongyang.com/Cheminformatics/Fingerprint-RDKit-Morgan-GetMorganFingerprintAsBitVect.html", + target="_blank", + ), + ] +) + + DOWNLOAD_SELECTION_BUTTON_DESC = """ Download the data of the compounds that are currently selected on the plot using box or lasso selection to a csv file containing projection values and compound data. @@ -98,5 +111,32 @@ ), ], ), + html.Div( + className="row", + children=[ + html.Div( + children=[ + html.Div( + className="row", + children=[ + html.Details( + [ + html.Summary( + html.Strong( + "ADDITIONAL PROJECTION INFORMATION" + ) + ), + html.Ul( + PROJECTION_SMILES_INFO_TEXT, + ), + ] + ) + ], + ), + ], + className="col-md-6", + ), + ], + ), ] ) diff --git a/dashboard/pages/hit_validation/callbacks.py b/dashboard/pages/hit_validation/callbacks.py index d33b67e..132ea6e 100644 --- a/dashboard/pages/hit_validation/callbacks.py +++ b/dashboard/pages/hit_validation/callbacks.py @@ -6,6 +6,7 @@ from datetime import datetime import dash_dangerously_set_inner_html as dhtml +import numpy as np import pandas as pd import pyarrow as pa from dash import ( @@ -25,12 +26,15 @@ four_param_logistic, perform_hit_determination, ) +from dashboard.data.json_reader import load_data_from_json from dashboard.pages.hit_validation.report.generate_report import ( generate_hit_valildation_report, generate_jinja_report, ) from dashboard.storage import FileStorage from dashboard.visualization.plots import plot_ic50, plot_smiles +from dashboard.pages.components import make_new_upload_view +from dashboard.data.json_reader import load_data_from_json SCREENING_FILENAME = "{0}_screening_df.pq" HIT_FILENAME = "{0}_hit_df.pq" @@ -62,7 +66,7 @@ def on_file_upload( :return: session uuid """ if content is None: - return no_update, no_update, no_update, no_update + return no_update, no_update, no_update, no_update, no_update if stored_uuid is None: stored_uuid = str(uuid.uuid4()) @@ -108,7 +112,11 @@ def on_file_upload( className="text-danger", ), no_update, + make_new_upload_view( + "File uploading error", "new Hit Validation input file (.csv)" + ), stored_uuid, + no_update, ) # screening df needs to be safed for plots @@ -161,7 +169,15 @@ def on_file_upload( ), ], ) - return result_msg, None, stored_uuid, False + return ( + result_msg, + None, + make_new_upload_view( + "File uploaded successfully", "new Hit Validation input file (.csv)" + ), + stored_uuid, + False, + ) FAIL_BOUNDS_ELEMENT = html.Div( @@ -177,6 +193,69 @@ def on_file_upload( ) +def upload_settings_data( + content: str | None, + name: str | None, + concentration_lower_bound: float, + concentration_upper_bound: float, + top_lower_bound: float, + top_upper_bound: float, +) -> tuple[float, float, float, float]: + """ + Callback for file upload. 
It update concentration lower bound, + concentration upper bound, top lower bound, top upper bound + + :param content: base64 encoded file content + :param name: filename + :param concentration_lower_bound: concentration lower bound + :param concentration_upper_bound: concentration upper bound + :param top_lower_bound: top lower bound + :param top_upper_bound: top upper bound + :return: concentration lower bound + :return: concentration upper bound + :return: top lower bound + :return: top_upper_bound + """ + if not content: + return no_update + loaded_data = load_data_from_json(content, name) + settings_keys = [ + "concentration_lower_bound", + "concentration_upper_bound", + "top_lower_bound", + "top_upper_bound", + ] + if loaded_data == None or not set(settings_keys).issubset(loaded_data.keys()): + concentration_lower_bound_value = concentration_lower_bound + concentration_upper_bound_value = concentration_upper_bound + top_lower_bound_value = top_lower_bound + top_upper_bound_value = top_upper_bound + color = "danger" + text = ( + f"Invalid settings uploaded: the file should contain {settings_keys} keys." + ) + + else: + concentration_lower_bound_value = loaded_data["concentration_lower_bound"] + concentration_upper_bound_value = loaded_data["concentration_upper_bound"] + top_lower_bound_value = loaded_data["top_lower_bound"] + top_upper_bound_value = loaded_data["top_upper_bound"] + color = "success" + text = "Settings uploaded successfully" + + return ( + concentration_lower_bound_value, + concentration_upper_bound_value, + top_lower_bound_value, + top_upper_bound_value, + True, + html.Span(text), + color, + make_new_upload_view(text, "new Settings file (.json)"), + no_update, + ) + + def on_bounds_change( lower_bound: float, upper_bound: float ) -> tuple[float, float, html.Div]: @@ -289,22 +368,6 @@ def on_selected_compound_changed( graph = plot_ic50(entry, concentrations, values) - modulation_ic50 = four_param_logistic( - entry["ic50"], - entry["BOTTOM"], - entry["TOP"], - entry["ic50"], - entry["slope"], - ) - - concentration_50 = find_argument_four_param_logistic( - 50, - entry["BOTTOM"], - entry["TOP"], - entry["ic50"], - entry["slope"], - ) - smiles_row = pd.read_parquet("dashboard/assets/ml/predictions.pq").loc[ lambda df: df["EOS"] == selected_compound ] @@ -316,14 +379,14 @@ def on_selected_compound_changed( smiles_html = dhtml.DangerouslySetInnerHTML(smiles_graph) text_concentration_50 = "NaN" - if type(concentration_50) != complex: - text_concentration_50 = f"{concentration_50:,.5f}" + if entry["concentration_50"] != np.nan: + text_concentration_50 = f"{entry['concentration_50']:,.5f}" result = { "min_modulation": f"{entry['min_value']:,.5f}", "max_modulation": f"{entry['max_value']:,.5f}", "ic50": f"{entry['ic50']:,.5f}", - "modulation_ic50": f"{modulation_ic50:,.5f}", + "modulation_ic50": f"{entry['modulation_ic50']:,.5f}", "concentration_50": text_concentration_50, "curve_slope": f"{entry['slope']:,.5f}", "r2": f"{entry['r2'] * 100:,.5f}", @@ -440,6 +503,7 @@ def register_callbacks(elements, file_storage: FileStorage): callback( Output("screening-file-message", "children"), Output("dummy-upload-screening-data", "children"), + Output("upload-screening-data", "children"), Output("user-uuid", "data", allow_duplicate=True), Output({"type": elements["BLOCKER"], "index": 0}, "data"), Input("upload-screening-data", "contents"), @@ -451,6 +515,25 @@ def register_callbacks(elements, file_storage: FileStorage): prevent_initial_call="initial_duplicate", 
)(functools.partial(on_file_upload, file_storage=file_storage)) + callback( + Output("concentration-lower-bound-input", "value"), + Output("concentration-upper-bound-input", "value"), + Output("top-lower-bound-input", "value"), + Output("top-upper-bound-input", "value"), + Output("alert-upload-settings-hit-validation", "is_open"), + Output("alert-upload-settings-hit-validation-text", "children"), + Output("alert-upload-settings-hit-validation", "color"), + Output("upload-settings-hit-validation", "children"), + Output("dummy-upload-settings-hit-validation", "children"), + Input("upload-settings-hit-validation", "contents"), + Input("upload-settings-hit-validation", "filename"), + State("concentration-lower-bound-input", "value"), + State("concentration-upper-bound-input", "value"), + State("top-lower-bound-input", "value"), + State("top-upper-bound-input", "value"), + prevent_initial_call=True, + )(functools.partial(upload_settings_data)) + callback( Output("concentration-lower-bound-store", "data"), Output("concentration-upper-bound-store", "data"), diff --git a/dashboard/pages/hit_validation/report/report.html b/dashboard/pages/hit_validation/report/report.html index 52b74fc..0103fb4 100644 --- a/dashboard/pages/hit_validation/report/report.html +++ b/dashboard/pages/hit_validation/report/report.html @@ -26,10 +26,52 @@ } h3 { - text-align: left; + text-align: center; font-weight: bold; font-size: 24px; } + + header { + background-color: black; + color: white; + text-align: center; + display: flex; + justify-content: space-between; + align-items: center; + padding: 3px; + } + + .logopart1 { + margin-left: 66px; + font-weight: bold; + font-size: 52px; + } + + .logopart2 { + font-size: 25px; + font-weight: 200; + font-style: italic; + } + + .title { + font-size: 65px; + font-style: italic; + margin-left: 285px; + } + + .data { + font-size: 20px; + font-style: italic; + position: absolute; + top: 80px; + right: 30px; + } + + .logo { + text-decoration: underline; + text-decoration-thickness: 0.1em; + text-decoration-color: blue; + } @@ -39,12 +81,20 @@ -

[report.html body hunk (markup not recoverable): the old header block rendering "{{id}}" and "{{current_day}}, {{current_time}}" is replaced by a new page header with the logo, the "{{id}}" title and a "Generated {{current_day}}" date line, styled by the header, .logopart1, .logopart2, .title and .data rules added above; the hunk's trailing context is the "Graph" section heading.]
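# Hedged worked example, not part of the patch: the maths behind the new
# calculate_modulation_ic50_and_concentration_50 helper in
# dashboard/data/determination.py. The inverse formula below is the one used by
# find_argument_four_param_logistic in the patch; the forward 4PL body is not
# shown in the diff, so the standard four-parameter logistic form is assumed
# here, and the fit parameters are illustrative values only.
import numpy as np


def four_param_logistic(x, bottom, top, ic50, slope):
    # Assumed standard 4PL curve: modulation as a function of concentration.
    return top + (bottom - top) / (1 + (x / ic50) ** slope)


def inverse_four_param_logistic(y, bottom, top, ic50, slope):
    # Inverse 4PL as in the patch; a complex result (y outside the curve's
    # range) is mapped to NaN, mirroring find_argument_four_param_logistic.
    x = ic50 * ((bottom - top) / (y - top) - 1) ** (1 / slope)
    return np.nan if isinstance(x, complex) else x


bottom, top, ic50, slope = 5.0, 95.0, 1.2, 1.8

modulation_ic50 = four_param_logistic(ic50, bottom, top, ic50, slope)
concentration_50 = inverse_four_param_logistic(50.0, bottom, top, ic50, slope)

print(f"{modulation_ic50:.2f}")   # 50.00 -> the midpoint of bottom and top
print(f"{concentration_50:.4f}")  # 1.2000 -> equals ic50 because 50 is the curve midpoint here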
diff --git a/dashboard/pages/hit_validation/stages/s1_screening_data_input.py b/dashboard/pages/hit_validation/stages/s1_screening_data_input.py index 76981ea..e22904c 100644 --- a/dashboard/pages/hit_validation/stages/s1_screening_data_input.py +++ b/dashboard/pages/hit_validation/stages/s1_screening_data_input.py @@ -1,5 +1,6 @@ from dash import dcc, html +import dash_bootstrap_components as dbc from dashboard.pages.components import annotate_with_tooltip ACTIVY_DETERMINATION_PARAMS_DESC = """ @@ -124,29 +125,66 @@ ), ], ), - dcc.Loading( + html.Div( children=[ - html.Div( + dcc.Loading( + children=[ + html.Div( + children=[ + dcc.Upload( + id="upload-screening-data", + accept=".csv", + children=html.Div( + [ + "Drag and Drop or ", + html.A( + "Select", + className="select-file", + ), + " Hit Validation input file (.csv)", + ] + ), + multiple=False, + className="text-center", + ), + html.Div(id="dummy-upload-screening-data"), + ], + className="upload-box m-1", + ), + ], + type="circle", + ), + dcc.Loading( children=[ dcc.Upload( - id="upload-screening-data", - accept=".csv", + id="upload-settings-hit-validation", + accept=".json", children=html.Div( [ "Drag and Drop or ", html.A("Select", className="select-file"), - " Hit Validation input file", + " Settings for hit validation (.json)", ] ), multiple=False, - className="text-center", + className="text-center upload-box m-1", + ), + html.Div( + id="dummy-upload-settings-hit-validation", + className="p-1", ), - html.Div(id="dummy-upload-screening-data"), ], - className="upload-box", + type="circle", + ), + dbc.Alert( + html.Div(id="alert-upload-settings-hit-validation-text"), + id="alert-upload-settings-hit-validation", + dismissable=True, + is_open=False, + duration=4000, + className="m-1", ), ], - type="circle", ), ], className="grid-1-1", diff --git a/dashboard/pages/screening/callbacks.py b/dashboard/pages/screening/callbacks.py index ff11361..5e1a1ab 100644 --- a/dashboard/pages/screening/callbacks.py +++ b/dashboard/pages/screening/callbacks.py @@ -22,6 +22,7 @@ ) from dashboard.data.bmg_plate import filter_low_quality_plates, parse_bmg_files +from dashboard.data.json_reader import load_data_from_json from dashboard.data.combine import ( aggregate_well_plate_stats, combine_bmg_echo_data, @@ -44,6 +45,7 @@ make_filter_radio_options, make_summary_stage_datatable, ) +from dashboard.pages.components import make_new_upload_view def on_next_button_click(n_clicks): @@ -55,7 +57,7 @@ def on_next_button_click(n_clicks): def upload_bmg_data(contents, names, last_modified, stored_uuid, file_storage): if contents is None: - return no_update, no_update, no_update, no_update + return no_update, no_update, no_update, no_update, no_update if not stored_uuid: stored_uuid = str(uuid.uuid4()) @@ -70,22 +72,60 @@ def upload_bmg_data(contents, names, last_modified, stored_uuid, file_storage): decoded = base64.b64decode(content_string) bmg_files.append((filename, io.StringIO(decoded.decode("utf-8")))) - if bmg_files: - bmg_df, val = parse_bmg_files(tuple(bmg_files)) - stream = io.BytesIO() - np.savez_compressed(stream, val) - stream.seek(0) - file_storage.save_file(f"{stored_uuid}_bmg_val.npz", stream.read()) - file_storage.save_file(f"{stored_uuid}_bmg_df.pq", bmg_df.to_parquet()) + if not bmg_files: + return no_update, no_update, no_update, no_update + + bmg_df, val, failed_files = parse_bmg_files(tuple(bmg_files)) + ok_names = [name for name, _ in bmg_files if name not in failed_files] + nok_entries = [f"{name}: {error}" for name, error in 
failed_files.items()] + + stream = io.BytesIO() + np.savez_compressed(stream, val) + stream.seek(0) + file_storage.save_file(f"{stored_uuid}_bmg_val.npz", stream.read()) + file_storage.save_file(f"{stored_uuid}_bmg_df.pq", bmg_df.to_parquet()) return ( - make_file_list_component(names, [], 2), + make_file_list_component(ok_names, nok_entries, 2), no_update, + make_new_upload_view( + f"Files uploaded. Success: {len(ok_names)}. Skipped {len(nok_entries)}", + "new BMG files (.txt)", + ), stored_uuid, False, ) +def upload_settings_data(content: str | None, name: str | None): + """ + Callback for file upload. It saves the in local storage for other components. + + :param content: base64 encoded file content + :param name: filename + :return: dict with loaded data + """ + if not content: + return no_update + loaded_data = load_data_from_json(content, name) + color = "success" + text = "Settings uploaded successfully" + settings_keys = ["statistics_stage", "summary_stage"] + if loaded_data == None or not set(settings_keys).issubset(loaded_data.keys()): + color = "danger" + text = ( + f"Invalid settings uploaded: the file should contain {settings_keys} keys." + ) + return ( + loaded_data, + True, + html.Span(text), + color, + make_new_upload_view(text, "new Settings file (.json)"), + no_update, + ) + + # === STAGE 2 === DISPLAYED_PLATES = 12 @@ -215,11 +255,58 @@ def on_outlier_purge_stage_entry( ) +HEATMAP_PLOT_REPORT_HEIGHT_PER_ROW = 250 +HEATMAP_PLOT_REPORT_COLS = 5 + + +def on_export_plots_button_click( + n_clicks: int, + stored_uuid: str, + file_storage: FileStorage, +) -> dict[str, str]: + """ + Callback for the export plots button. Exports all heatmap plots to a single html file. + + :param n_clicks: number of clicks + :param stored_uuid: uuid of the stored data + :param file_storage: storage object + :return: dictionary containing the html file with plots and its name + """ + if n_clicks is None: + return no_update + + bmg_df = pd.read_parquet( + pa.BufferReader(file_storage.read_file(f"{stored_uuid}_bmg_df.pq")) + ) + bmg_vals = np.load( + io.BytesIO(file_storage.read_file(f"{stored_uuid}_bmg_val.npz")) + )["arr_0"] + n_rows, remainder = divmod(bmg_vals.shape[0], N_COLS) + n_rows += bool(remainder) + + fig = visualize_multiple_plates( + bmg_df, bmg_vals, n_rows, HEATMAP_PLOT_REPORT_COLS, free_format=True + ) + fig.update_layout( + height=n_rows * HEATMAP_PLOT_REPORT_HEIGHT_PER_ROW, + coloraxis_showscale=False, + ) + fig.update_annotations( + font_size=16, + ) + as_html = fig.to_html() + filename = f"screening_heatmaps_{datetime.now().strftime('%Y-%m-%dT%H_%M_%S')}.html" + return dict(content=as_html, filename=filename) + + # === STAGE 3 === def on_plates_stats_stage_entry( - current_stage: int, value: float, stored_uuid: str, file_storage: FileStorage + current_stage: int, + value: float, + stored_uuid: str, + file_storage: FileStorage, ) -> tuple[go.Figure, go.Figure, go.Figure, str, str]: """ Callback for the stage 3 entry @@ -240,8 +327,10 @@ def on_plates_stats_stage_entry( raw_vals = file_storage.read_file(f"{stored_uuid}_bmg_val.npz") bmg_vals = np.load(io.BytesIO(raw_vals))["arr_0"] - filtered_df, filtered_vals = filter_low_quality_plates(bmg_df, bmg_vals, value) - num_removed = bmg_df.shape[0] - filtered_df.shape[0] + filtered_df, low_quality_df, filtered_vals = filter_low_quality_plates( + bmg_df, bmg_vals, value + ) + num_removed = len(low_quality_df) control_values_fig = plot_control_values(filtered_df) row_col_fig = plot_row_col_means(filtered_vals) @@ -269,6 +358,31 @@ def 
on_plates_stats_stage_entry( ) +def on_plates_stats_stage_entry_load_settings( + current_stage: int, + value: float, + saved_data: dict, +) -> float: + """ + Callback for the stage 3 entry + Change value of z-slider if json was loaded + + :param current_stage: current stage index of the process + :param value: z threshold, slider value + :param saved_data: dict with loaded data + :return: value for z-slider + """ + + if current_stage != 2: + return no_update + + z_slider_value = value + if saved_data != None: + z_slider_value = saved_data["statistics_stage"]["z_slider_value"] + + return z_slider_value + + def hide_heatmap_loading(trigger, children): """ Hide the heatmap loading component after the heatmap is loaded @@ -301,7 +415,9 @@ def on_upload_echo_data(contents, names, last_modified, stored_uuid, file_storag f"{stored_uuid}_exceptions_df.pq", exceptions_df.to_parquet() ) - return None # dummy upload echo return + return None, make_new_upload_view( + "Files uploaded", "new ECHO files (.csv)" + ) # dummy upload echo return def on_upload_eos_data(contents, stored_uuid, file_storage): @@ -311,7 +427,9 @@ def on_upload_eos_data(contents, stored_uuid, file_storage): eos_decoded = base64.b64decode(contents.split(",")[1]).decode("utf-8") eos_df = pd.read_csv(io.StringIO(eos_decoded), dtype="str") file_storage.save_file(f"{stored_uuid}_eos_df.pq", eos_df.to_parquet()) - return None # dummy upload eos return + return None, make_new_upload_view( + "File uploaded", "new EOS file (.csv)" + ) # dummy upload eos return def on_upload_echo_eos_data(echo_upload, names, eos_upload, stored_uuid, file_storage): @@ -396,7 +514,7 @@ def on_summary_entry( io.BytesIO(file_storage.read_file(f"{stored_uuid}_bmg_val.npz")) )["arr_0"] - filtered_df, filtered_vals = filter_low_quality_plates( + filtered_df, _, filtered_vals = filter_low_quality_plates( bmg_df, bmg_vals, z_slider["z_slider_value"] ) @@ -465,8 +583,6 @@ def on_summary_entry( summary_stage_datatable, fig_z_score, fig_feature, - -3, # z_score_min, - 3, # z_score_max, False, # min input disabled False, # max input disabled feature_min, @@ -481,6 +597,48 @@ def on_summary_entry( ) +def on_summary_entry_load_settings( + current_stage: int, + z_score_min, + z_score_max, + feature_min, + feature_max, + saved_data: dict, +) -> float: + """ + Callback for the stage 5 entry + Loads the data from loaded settings and change values for z-score or feature based on key + + :param current_stage: current stage index of the process + :param z_score_min: min z-score value + :param z_score_max: max z-score value + :param feature_min: min feature value + :param feature_max: max feature value + :param saved_data: dict with loaded data + :return min z-score value + :return max z-score value + :return min feature value + :return max feature value + """ + + if current_stage != 4: + return no_update + + z_score_min_value = z_score_min + z_score_max_value = z_score_max + feature_min_value = feature_min + feature_max_value = feature_max + if saved_data != None: + if saved_data["summary_stage"]["key"] == "activation": + feature_min_value = saved_data["summary_stage"]["key_min"] + feature_max_value = saved_data["summary_stage"]["key_max"] + elif saved_data["summary_stage"]["key"] == "z_score": + z_score_min_value = saved_data["summary_stage"]["key_min"] + z_score_max_value = saved_data["summary_stage"]["key_max"] + + return z_score_min_value, z_score_max_value, feature_min_value, feature_max_value + + def on_filter_radio_or_range_update( key: str, z_score_min: float, @@ -632,6 
+790,38 @@ def on_save_results_click( return dcc.send_data_frame(echo_bmg_combined_df.to_csv, filename) +def on_save_low_quality_plates_click( + n_clicks: int, + stored_uuid: str, + z_slider: float, + file_storage: FileStorage, +) -> None: + """ + Callback for the save exceptions button + + :param n_clicks: number of clicks + :param stored_uuid: uuid of the stored data + :param file_storage: storage object + :return: None + """ + z_slider = z_slider["z_slider_value"] + filename = f"screening_low_quality_plates_{datetime.now().strftime('%Y-%m-%d')}.csv" + raw_bmg = file_storage.read_file(f"{stored_uuid}_bmg_df.pq") + bmg_df = pd.read_parquet(pa.BufferReader(raw_bmg)) + raw_vals = file_storage.read_file(f"{stored_uuid}_bmg_val.npz") + bmg_vals = np.load(io.BytesIO(raw_vals))["arr_0"] + + _, low_quality_df, _ = filter_low_quality_plates(bmg_df, bmg_vals, z_slider) + low_quality_df = low_quality_df.rename( + columns={ + "barcode": "Plate Barcode", + "z_factor": f"Z factor value (lower bound: {z_slider})", + } + ) + + return dcc.send_data_frame(low_quality_df.to_csv, filename) + + def on_save_exceptions_click( n_clicks: int, stored_uuid: str, @@ -665,15 +855,7 @@ def on_report_generate_button_click( report_data_second_stage.update(report_data_third_stage) report_data_second_stage.update(report_data_screening_summary_plots) jinja_template = generate_jinja_report(report_data_second_stage) - return html.Div( - className="col", - children=[ - html.H5( - className="text-center", - children=f"Report generated", - ), - ], - ), dict(content=jinja_template, filename=filename) + return dict(content=jinja_template, filename=filename) def on_json_generate_button_click( @@ -695,6 +877,7 @@ def register_callbacks(elements, file_storage): [ Output("bmg-filenames", "children"), Output("dummy-upload-bmg-data", "children"), + Output("upload-bmg-data", "children"), Output("user-uuid", "data"), Output({"type": elements["BLOCKER"], "index": 0}, "data"), ], @@ -704,6 +887,17 @@ def register_callbacks(elements, file_storage): State("user-uuid", "data"), )(functools.partial(upload_bmg_data, file_storage=file_storage)) + callback( + Output("loaded-setings-screening", "data"), + Output("alert-upload-settings-screening", "is_open"), + Output("alert-upload-settings-screening-text", "children"), + Output("alert-upload-settings-screening", "color"), + Output("upload-settings-screening", "children"), + Output("dummy-upload-settings-screening", "children"), + Input("upload-settings-screening", "contents"), + Input("upload-settings-screening", "filename"), + )(functools.partial(upload_settings_data)) + callback( Output("heatmap-start-index", "data"), Input("heatmap-previous-btn", "n_clicks"), @@ -749,8 +943,23 @@ def register_callbacks(elements, file_storage): State("user-uuid", "data"), )(functools.partial(on_plates_stats_stage_entry, file_storage=file_storage)) + callback( + Output("download-plates-heatmap", "data"), + Input("heatmaps-export-btn", "n_clicks"), + State("user-uuid", "data"), + prevent_initial_call=True, + )(functools.partial(on_export_plots_button_click, file_storage=file_storage)) + + callback( + Output("z-slider", "value"), + Input(elements["STAGES_STORE"], "data"), + State("z-slider", "value"), + State("loaded-setings-screening", "data"), + )(functools.partial(on_plates_stats_stage_entry_load_settings)) + callback( Output("dummy-upload-echo-data", "children"), + Output("upload-echo-data", "children"), Input("upload-echo-data", "contents"), Input("upload-echo-data", "filename"), Input("upload-echo-data", 
"last_modified"), @@ -760,6 +969,7 @@ def register_callbacks(elements, file_storage): callback( Output("dummy-upload-eos-mapping", "children"), + Output("upload-eos-mapping", "children"), Input("upload-eos-mapping", "contents"), State("user-uuid", "data"), prevent_initial_call=True, @@ -786,12 +996,10 @@ def register_callbacks(elements, file_storage): Output("compounds-data-table", "children"), Output("z-score-plot", "figure"), Output("feature-plot", "figure"), - Output("z-score-min-input", "value"), - Output("z-score-max-input", "value"), Output("z-score-min-input", "disabled"), Output("z-score-max-input", "disabled"), - Output("feature-min-input", "value"), - Output("feature-max-input", "value"), + Output("feature-min-input", "value", allow_duplicate=True), + Output("feature-max-input", "value", allow_duplicate=True), Output("feature-min-input", "disabled"), Output("feature-max-input", "disabled"), Output("compounds-data-subtitle", "children"), @@ -803,7 +1011,22 @@ def register_callbacks(elements, file_storage): State("user-uuid", "data"), State("z-slider-value", "data"), State("activation-inhibition-screening-options", "data"), + prevent_initial_call=True, )(functools.partial(on_summary_entry, file_storage=file_storage)) + + callback( + Output("z-score-min-input", "value"), + Output("z-score-max-input", "value"), + Output("feature-min-input", "value"), + Output("feature-max-input", "value"), + Input(elements["STAGES_STORE"], "data"), + State("z-score-min-input", "value"), + State("z-score-max-input", "value"), + State("feature-min-input", "value"), + State("feature-max-input", "value"), + State("loaded-setings-screening", "data"), + )(functools.partial(on_summary_entry_load_settings)) + # Z-SCORE callback( Output("z-score-plot", "figure", allow_duplicate=True), @@ -838,6 +1061,13 @@ def register_callbacks(elements, file_storage): State("report-data-csv", "data"), prevent_initial_call=True, )(functools.partial(on_save_results_click, file_storage=file_storage)) + callback( + Output("download-low-quality-plates-csv", "data"), + Input("save-low-quality-plates-button", "n_clicks"), + State("user-uuid", "data"), + State("z-slider-value", "data"), + prevent_initial_call=True, + )(functools.partial(on_save_low_quality_plates_click, file_storage=file_storage)) callback( Output("download-exceptions-csv", "data"), Input("save-exceptions-button", "n_clicks"), @@ -845,7 +1075,6 @@ def register_callbacks(elements, file_storage): prevent_initial_call=True, )(functools.partial(on_save_exceptions_click, file_storage=file_storage)) callback( - Output("report_callback_receiver", "children"), Output("download-html-raport", "data"), Input("generate-report-button", "n_clicks"), State("report-data-second-stage", "data"), diff --git a/dashboard/pages/screening/report/report.html b/dashboard/pages/screening/report/report.html index 817dfa7..8d32001 100644 --- a/dashboard/pages/screening/report/report.html +++ b/dashboard/pages/screening/report/report.html @@ -30,21 +30,71 @@ font-weight: bold; font-size: 24px; } + + header { + background-color: black; + color: white; + text-align: center; + display: flex; + justify-content: space-between; + align-items: center; + padding: 3px; + } + + .logopart1 { + margin-left: 66px; + font-weight: bold; + font-size: 52px; + } + + .logopart2 { + font-size: 25px; + font-weight: 200; + font-style: italic; + } + + .title { + font-size: 65px; + font-style: italic; + margin-left: 205px; + } + + .data { + font-size: 20px; + font-style: italic; + position: absolute; + top: 80px; + 
+        right: 30px;
+      }
+
+      .logo {
+        text-decoration: underline;
+        text-decoration-thickness: 0.1em;
+        text-decoration-color: blue;
+      }

-      Report
+      Screening Results

-      Report {{current_day}}
-
-      {{current_time}}
-
+
+      Screening Results
+
+      Generated {{current_day}}
+

      1. Outliers preview

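The on_export_plots_button_click callback added above follows the standard Dash download pattern: build a Plotly figure, render it to standalone HTML with fig.to_html(), and return dict(content=..., filename=...) into a dcc.Download output. A stripped-down sketch of that pattern, with a toy heatmap standing in for the plate grid figure:

from datetime import datetime

import plotly.graph_objects as go
from dash import Dash, Input, Output, dcc, html

app = Dash(__name__)
app.layout = html.Div(
    [
        html.Button("Export Plates Plots", id="heatmaps-export-btn"),
        dcc.Download(id="download-plates-heatmap"),
    ]
)


@app.callback(
    Output("download-plates-heatmap", "data"),
    Input("heatmaps-export-btn", "n_clicks"),
    prevent_initial_call=True,
)
def export_plots(n_clicks):
    # Stand-in for visualize_multiple_plates(...); any figure works here.
    fig = go.Figure(go.Heatmap(z=[[1, 2], [3, 4]], colorscale="viridis"))
    filename = f"heatmaps_{datetime.now().strftime('%Y-%m-%dT%H_%M_%S')}.html"
    # dcc.Download accepts a dict with "content" (text) and "filename" keys.
    return dict(content=fig.to_html(), filename=filename)
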
diff --git a/dashboard/pages/screening/stages/s1_bmg_input.py b/dashboard/pages/screening/stages/s1_bmg_input.py index cff008b..ba2aa89 100644 --- a/dashboard/pages/screening/stages/s1_bmg_input.py +++ b/dashboard/pages/screening/stages/s1_bmg_input.py @@ -1,4 +1,5 @@ from dash import dcc, html +import dash_bootstrap_components as dbc BMG_DESC = """BMG files in ".txt" format should be in the form of two columns, where the first column contains the well unique to the plate, e.g. A02, M13, P24, etc. @@ -13,24 +14,61 @@ html.Div( children=[ html.P(BMG_DESC, className="text-justify"), - dcc.Loading( + html.Div( children=[ - dcc.Upload( - id="upload-bmg-data", - accept=".txt", - children=html.Div( - [ - "Drag and Drop or ", - html.A("Select", className="select-file"), - " BMG files", - ] - ), - multiple=True, - className="text-center upload-box", + dcc.Loading( + children=[ + dcc.Upload( + id="upload-bmg-data", + accept=".txt", + children=html.Div( + [ + "Drag and Drop or ", + html.A("Select", className="select-file"), + " BMG files (.txt)", + ] + ), + multiple=True, + className="text-center upload-box", + ), + html.Div( + id="dummy-upload-bmg-data", + className="p-1", + ), + ], + type="circle", ), - html.Div(id="dummy-upload-bmg-data"), - ], - type="circle", + dcc.Loading( + children=[ + dcc.Upload( + id="upload-settings-screening", + accept=".json", + children=html.Div( + [ + "Drag and Drop or ", + html.A("Select", className="select-file"), + " Settings for screening (.json)", + ] + ), + multiple=False, + className="text-center upload-box", + ), + html.Div( + id="dummy-upload-settings-screening", + className="p-1", + ), + ], + type="circle", + ), + dbc.Alert( + html.Div(id="alert-upload-settings-screening-text"), + id="alert-upload-settings-screening", + dismissable=True, + is_open=False, + duration=4000, + className="m-1", + ), + ] ), ], className="grid-2-1", diff --git a/dashboard/pages/screening/stages/s2_outliers_purging.py b/dashboard/pages/screening/stages/s2_outliers_purging.py index 1427a9c..ddaf32e 100644 --- a/dashboard/pages/screening/stages/s2_outliers_purging.py +++ b/dashboard/pages/screening/stages/s2_outliers_purging.py @@ -131,7 +131,19 @@ className="d-flex flex-row justify-content-between align-items-center", children=[ html.Div( + className="d-flex flex-row gap-3 align-items-center", children=[ + dcc.Loading( + html.Button( + id="heatmaps-export-btn", + children=[ + "Export Plates Plots", + dcc.Download(id="download-plates-heatmap"), + ], + className="btn btn-primary", + ), + type="circle", + ), html.Span( className="mx-2", children=[ @@ -174,7 +186,7 @@ ), ], ), - ] + ], ), annotate_with_tooltip( html.Div( diff --git a/dashboard/pages/screening/stages/s4_echo_input.py b/dashboard/pages/screening/stages/s4_echo_input.py index f60c406..9120926 100644 --- a/dashboard/pages/screening/stages/s4_echo_input.py +++ b/dashboard/pages/screening/stages/s4_echo_input.py @@ -34,7 +34,7 @@ [ "Drag and Drop or ", html.A("Select", className="select-file"), - " ECHO files", + " ECHO files (.cvs)", ] ), multiple=True, @@ -68,7 +68,7 @@ [ "Drag and Drop or ", html.A("Select", className="select-file"), - " EOS mapping file", + " EOS mapping file (.csv)", ] ), multiple=False, diff --git a/dashboard/pages/screening/stages/s5_summary.py b/dashboard/pages/screening/stages/s5_summary.py index c57bce9..bf0bdb9 100644 --- a/dashboard/pages/screening/stages/s5_summary.py +++ b/dashboard/pages/screening/stages/s5_summary.py @@ -48,6 +48,7 @@ className="stats-input", step=0.00001, disabled=True, + 
value=-3, ), ], ), @@ -62,6 +63,7 @@ step=0.00001, className="stats-input", disabled=True, + value=3, ), ], ), diff --git a/dashboard/pages/screening/stages/s6_report.py b/dashboard/pages/screening/stages/s6_report.py index f1148af..c059f92 100644 --- a/dashboard/pages/screening/stages/s6_report.py +++ b/dashboard/pages/screening/stages/s6_report.py @@ -33,6 +33,31 @@ ), ], ), + html.Div( + className="row mt-2", + children=[ + html.Div( + className="col", + children=[ + html.Div( + className="d-flex justify-content-center", + children=[ + html.Button( + make_download_button_text( + "Save low quality plates as CSV" + ), + className="btn btn-primary btn-lg btn-block btn-report", + id="save-low-quality-plates-button", + ), + dcc.Download( + id="download-low-quality-plates-csv" + ), + ], + ), + ], + ), + ], + ), html.Div( className="row mt-2", children=[ @@ -104,9 +129,6 @@ ), ], ), - html.Div( - id="report_callback_receiver", - ), ], ), ], diff --git a/dashboard/visualization/plots.py b/dashboard/visualization/plots.py index f44f331..01fe38b 100644 --- a/dashboard/visualization/plots.py +++ b/dashboard/visualization/plots.py @@ -175,7 +175,11 @@ def make_projection_plot( def visualize_multiple_plates( - df: pd.DataFrame, plate_array: np.ndarray, rows: int = 3, cols: int = 3 + df: pd.DataFrame, + plate_array: np.ndarray, + rows: int = 3, + cols: int = 3, + free_format: bool = False, ) -> go.Figure: """ Visualize plate values on subplots 3x3 @@ -184,14 +188,20 @@ def visualize_multiple_plates( :param plate_array: array with plate values :param rows: number of rows in plot grid :param cols: number of cols in plot grid + :param free_format: whether to use free format for export :return: plot with visualized plates """ + extra_args = { + "horizontal_spacing": 0.01, + "vertical_spacing": 0.02, + } + if not free_format: + extra_args["vertical_spacing"] = 0.05 fig = make_subplots( rows, cols, - horizontal_spacing=0.01, - vertical_spacing=0.05, subplot_titles=df.barcode.to_list(), + **extra_args, ) ids = product(list(range(1, rows + 1)), list(range(1, cols + 1))) for i, p, plate in zip(range(1, rows * cols + 1), ids, plate_array): @@ -206,18 +216,19 @@ def visualize_multiple_plates( p[1], ) - fig.update_layout( - { - f"xaxis{i}": {"fixedrange": True, "showgrid": False}, - f"yaxis{i}": { - "fixedrange": True, - "showgrid": False, - "scaleanchor": f"x{i}", - "autorange": "reversed", - }, - "autosize": True, - } - ) + if not free_format: + fig.update_layout( + { + f"xaxis{i}": {"fixedrange": True, "showgrid": False}, + f"yaxis{i}": { + "fixedrange": True, + "showgrid": False, + "scaleanchor": f"x{i}", + "autorange": "reversed", + }, + "autosize": True, + } + ) fig.update_layout( coloraxis={"colorscale": "viridis"}, @@ -598,7 +609,9 @@ def concentration_confirmatory_plot( return fig -def concentration_plot(df: pd.DataFrame, reaction_type: str) -> go.Figure: +def concentration_plot( + df: pd.DataFrame, reaction_type: str, line_1: float = 0, line_2: float = 100 +) -> go.Figure: """ Plot activation/inhibition values for each compound by concentration @@ -608,7 +621,7 @@ def concentration_plot(df: pd.DataFrame, reaction_type: str) -> go.Figure: """ fig = go.Figure() # NOTE: to clarify - value_by_conc = df.pivot_table(f"% {reaction_type}_x", "EOS", "Concentration") + value_by_conc = df.pivot_table(f"% {reaction_type}_0", "EOS", "Concentration") for _, row in value_by_conc.iterrows(): fig.add_trace( go.Scatter( @@ -621,6 +634,8 @@ def concentration_plot(df: pd.DataFrame, reaction_type: str) -> go.Figure: 
text=[str(row.name), str(row.name), str(row.name)], ) ) + fig.add_hline(y=line_1, line_dash="dash") + fig.add_hline(y=line_2, line_dash="dash") fig.update_layout( title_text="Concentrations", xaxis_title="Concentration [uM]", diff --git a/dashboard/visualization/text_tables.py b/dashboard/visualization/text_tables.py index 3b7f5e9..d6f8a69 100644 --- a/dashboard/visualization/text_tables.py +++ b/dashboard/visualization/text_tables.py @@ -1,5 +1,4 @@ import pandas as pd - from dash import dash_table, html from dash.dash_table.Format import Format, Scheme from sklearn.decomposition import PCA @@ -176,7 +175,7 @@ def table_from_df(df: pd.DataFrame, table_id: str) -> html.Div: ) -def pca_summary(pca: PCA, activation_columns: list[str]): +def pca_summary(pca: PCA, activation_columns: list[str]) -> html.Details: """ Construct a summary of PCA projection. :param pca: PCA object diff --git a/tests/bmg_plate_test.py b/tests/bmg_plate_test.py index 89e3bfa..a406563 100644 --- a/tests/bmg_plate_test.py +++ b/tests/bmg_plate_test.py @@ -67,6 +67,10 @@ def test_get_activation_inhibition_zscore_dict(df_stats): def test_filter_low_quality_plates(df_stats): values = np.array([[5, 3, 3], [0, 1, 0]]) - quality_df, quality_plates = filter_low_quality_plates(df_stats, values, -2.5) + ( + quality_df, + _, + quality_plates, + ) = filter_low_quality_plates(df_stats, values, -2.5) df_diff = pd.concat([df_stats, quality_df]).drop_duplicates(keep=False) assert df_diff.empty and np.array_equal(quality_plates, values)
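
The updated test above now unpacks three values from filter_low_quality_plates, matching the new call sites in callbacks.py (kept plates, rejected plates, kept plate arrays). The following is a hedged sketch of what that signature implies, written from the call sites only and not taken from the actual implementation in dashboard.data.bmg_plate; the fixture data is likewise illustrative.

import numpy as np
import pandas as pd


def filter_low_quality_plates_sketch(
    df: pd.DataFrame, plate_vals: np.ndarray, threshold: float
) -> tuple[pd.DataFrame, pd.DataFrame, np.ndarray]:
    # Keep plates whose Z factor is above the lower bound; the rejected rows
    # are returned separately so they can be exported as CSV (see
    # on_save_low_quality_plates_click above).
    mask = df["z_factor"] > threshold
    quality_df = df[mask]
    low_quality_df = df[~mask]
    quality_vals = plate_vals[mask.to_numpy()]
    return quality_df, low_quality_df, quality_vals


# Usage mirroring the updated test (illustrative data):
df_stats = pd.DataFrame({"barcode": ["P1", "P2"], "z_factor": [0.6, -3.0]})
values = np.array([[5, 3, 3], [0, 1, 0]])
kept_df, dropped_df, kept_vals = filter_low_quality_plates_sketch(df_stats, values, -2.5)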