From b0313fab1bc8486a2754f1102d272b1727d26b46 Mon Sep 17 00:00:00 2001 From: jalew188 Date: Thu, 13 Jun 2024 14:20:03 +0200 Subject: [PATCH 1/7] ADD docs for thermo.py and ms_data_base.py --- alpharaw/ms_data_base.py | 121 ++++++++++++++++++++++++++++++++++----- alpharaw/thermo.py | 69 +++++++++++----------- 2 files changed, 140 insertions(+), 50 deletions(-) diff --git a/alpharaw/ms_data_base.py b/alpharaw/ms_data_base.py index e663df5..a7bcac5 100644 --- a/alpharaw/ms_data_base.py +++ b/alpharaw/ms_data_base.py @@ -6,7 +6,14 @@ class MSData_Base: """ - The base data structure for MS Data, other MSData loader inherit + The base data structure for MS Data, other MSData loader inherit this class. + + Parameters + ---------- + centroided : bool, optional + If centroiding the peak data, by default True + save_as_hdf : bool, optional + If automatically save the data into HDF5 format, by default False """ column_dtypes = { @@ -25,7 +32,7 @@ class MSData_Base: """ Spectrum dataframe containing the following columns: - - `rt` (float64): in minutes + - `rt` (float64): in minutes. `rt_sec` will be RT in seconds. - `precursor_mz` (float64): mono_mz (DDA) or isolation center mz - `isolation_lower_mz` (float64): left of the isolation window - `isolation_upper_mz` (float64): right of the isolation window @@ -62,15 +69,12 @@ class MSData_Base: "FT", "TOF", ] + """ + Some spectrum infomation in str format that can be mapped into unique token IDs. + Token IDs are better for storage in HDF5 format. + """ def __init__(self, centroided: bool = True, save_as_hdf: bool = False, **kwargs): - """ - Parameters - ---------- - centroided : bool, optional - if peaks will be centroided after loading, - by default True - """ # A spectrum contains peaks self.spectrum_df: pd.DataFrame = pd.DataFrame() # A peak contains mz, intensity, and ... 
@@ -82,9 +86,19 @@ def __init__(self, centroided: bool = True, save_as_hdf: bool = False, **kwargs) self.file_type = "" self.instrument = "none" - def _get_term_id(self, terminology: str): + def _get_term_id(self, terminology: str)->int: """ - Get terminology id from :data:`self.vocab`, -1 if not exist. + Get terminology ID from :attr:`.MSData_Base.vocab`, -1 if not exist. + + Parameters + ---------- + terminology : str + The terminology name. + + Returns + ------- + int + Terminology ID, which is the index in :attr:`.MSData_Base.vocab` """ try: return self.vocab.index(terminology) @@ -100,15 +114,31 @@ def raw_file_path(self, _path: str): self._raw_file_path = _path def import_raw(self, _path: str): + """ + Import a raw file. It involves three steps: + ``` + raw_data_dict = self._import(_path) + self._set_dataframes(raw_data_dict) + self._check_df() + ``` + + Parameters + ---------- + _path : str + Raw file path + """ self.raw_file_path = _path - raw_data = self._import(_path) - self._set_dataframes(raw_data) + raw_data_dict = self._import(_path) + self._set_dataframes(raw_data_dict) self._check_df() if self._save_as_hdf: self.save_hdf(_path + ".hdf") def load_raw(self, _path: str): + """ + Wrapper of :func:`.MSData_Base.import_raw` + """ self.import_raw(_path) def _save_meta_to_hdf(self, hdf: HDF_File): @@ -128,6 +158,14 @@ def _load_meta_from_hdf(self, hdf: HDF_File): self.instrument = hdf.ms_data.meta.instrument def save_hdf(self, _path: str): + """ + Save data into HDF5 file + + Parameters + ---------- + _path : str + HDF5 file path + """ hdf = HDF_File(_path, read_only=False, truncate=True, delete_existing=True) hdf.ms_data = {"spectrum_df": self.spectrum_df, "peak_df": self.peak_df} @@ -135,6 +173,14 @@ def save_hdf(self, _path: str): self._save_meta_to_hdf(hdf) def load_hdf(self, _path: str): + """ + Load data from HDF5 file. + + Parameters + ---------- + _path : str + HDF5 file path. 
+ """ hdf = HDF_File(_path, read_only=True, truncate=False, delete_existing=False) self.spectrum_df = hdf.ms_data.spectrum_df.values @@ -144,6 +190,9 @@ def load_hdf(self, _path: str): self._load_meta_from_hdf(hdf) def reset_spec_idxes(self): + """ + Reset spec indexes to make sure spec_idx values are continuous ranging from 0 to N. + """ self.spectrum_df.reset_index(drop=True, inplace=True) self.spectrum_df["spec_idx"] = self.spectrum_df.index.values @@ -200,6 +249,14 @@ def create_spectrum_df( self, spectrum_num: int, ): + """ + Create a empty spectrum dataframe from the number of spectra. + + Parameters + ---------- + spectrum_num : int + The number of spectra. + """ self.spectrum_df = pd.DataFrame(index=np.arange(spectrum_num, dtype=np.int64)) self.spectrum_df["spec_idx"] = self.spectrum_df.index.values @@ -345,6 +402,11 @@ def index_ragged_list(ragged_list: list) -> np.ndarray: class MSData_HDF(MSData_Base): + """ + Wrapper of reader for alpharaw's HDF5 spectrum file. + This class regiesters as "alpharaw", "raw.hdf", "alpharaw_hdf", "hdf" and "hdf5" + in :data:`ms_reader_provider` instance. + """ def import_raw(self, _path: str): self.raw_file_path = _path self.load_hdf(_path) @@ -356,20 +418,49 @@ class MSReaderProvider: def __init__(self): self.ms_reader_dict = {} - def register_reader(self, ms2_type: str, reader_class): + def register_reader(self, ms2_type: str, reader_class:type): + """ + Register a new reader for `ms_type` format with `reader_class`. + + Parameters + ---------- + file_type : str + AlphaRaw supported MS file types. + reader_class : type + AlphaRaw supported MS class types. + """ self.ms_reader_dict[ms2_type.lower()] = reader_class def get_reader( self, file_type: str, *, centroided: bool = True, **kwargs ) -> MSData_Base: + """ + Get the MS reader. + + Parameters + ---------- + file_type : str + AlphaRaw supported MS file types. + centroided : bool, optional + If centroiding the data, by default True. 
+ + Returns + ------- + MSData_Base + Instance of corresponding sub-class of `MSData_Base`. + """ file_type = file_type.lower() if file_type not in self.ms_reader_dict: return None else: return self.ms_reader_dict[file_type](centroided=centroided, **kwargs) - ms_reader_provider = MSReaderProvider() +""" +MS data register (:class:`.MSReaderProvider`) performs as a factory to +produce different readers for different file formats. +""" + ms_reader_provider.register_reader("alpharaw", MSData_HDF) ms_reader_provider.register_reader("raw.hdf", MSData_HDF) ms_reader_provider.register_reader("alpharaw_hdf", MSData_HDF) diff --git a/alpharaw/thermo.py b/alpharaw/thermo.py index 80b2b4b..1dc9a04 100644 --- a/alpharaw/thermo.py +++ b/alpharaw/thermo.py @@ -14,6 +14,7 @@ ms_reader_provider, ) +#: These thermo spectrum items can only be accessed by trailer dict using RawFileReader APIs. __trailer_extra_list__ = [ "injection_time", "cv", @@ -24,6 +25,8 @@ "funnel_rf_level", "faims_cv", ] + +#: The auxiliary items and types that can be accessed from thermo RawFileReader. __auxiliary_item_dtypes__ = { "injection_time": np.float32, "cv": np.float32, @@ -47,6 +50,23 @@ class ThermoRawData(MSData_Base): """ Loading Thermo Raw data as MSData_Base data structure. + Register "thermo" and "thermo_raw" in :data:`ms_reader_provider`. + + Parameters + ---------- + centroided : bool, optional + If peaks will be centroided after loading. By default True. + process_count : int, optional + Number of processes to use for loading, by default 10. + mp_batch_size : int, optional + Number of spectra to load in each batch, by default 5000. + save_as_hdf : bool, optional + If automatically save HDF after loading raw data, by default False. + dda : bool, optional + _description_, by default False. + auxiliary_items : list, optional + Additional spectrum items, candidates are in :data:`__auxiliary_item_dtypes__`. + By default []. 
""" def __init__( @@ -59,34 +79,6 @@ def __init__( auxiliary_items: list = [], **kwargs, ): - """ - Parameters - ---------- - centroided : bool, default = True - if peaks will be centroided after loading, - by default True - - process_count : int, default = 8 - number of processes to use for loading - - mp_batch_size : int, default = 10000 - number of spectra to load in each batch - - save_as_hdf : bool, default = False - automatically save hdf after load raw data. - - dda : bool, default = False - is DDA data - - auxiliary_items : list, default = [] - Candidates are: - "injection_time", "cv", - "max_ion_time", "agc_target", "energy_ev", - "injection_optics_settling_time", - "funnel_rf_level", "faims_cv", - "detector", "activation", "analyzer", - "detector_id", "activation_id", "analyzer_id", - """ super().__init__(centroided, save_as_hdf=save_as_hdf, **kwargs) self.file_type = "thermo" self.process_count = process_count @@ -99,6 +91,19 @@ def _import( self, raw_file_path: str, ) -> dict: + """ + Re-implementation of :func:`MSData_Base._import` to enable :func:`.MSData_Base.import_raw`. + + Parameters + ---------- + raw_file_path : str + File path of the raw data. + + Returns + ------- + dict + Spectrum information in a temporary dict format. + """ rawfile = pyrawfilereader.RawFileReader(raw_file_path) self.creation_time = rawfile.GetCreationDate() @@ -185,13 +190,7 @@ def _import_batch( is dda data. auxiliary_items : list - Candidates: - "injection_time", "cv", - "max_ion_time", "agc_target", "energy_ev", - "injection_optics_settling_time", - "funnel_rf_level", "faims_cv", - "activation", "analyzer", - "activation_id", "analyzer_id", + Candidates are in :data:`__auxiliary_item_dtypes__`. 
Returns ------- From 0c01135a312ad1f223ea303f3ecdf7aa20cfb2b7 Mon Sep 17 00:00:00 2001 From: jalew188 Date: Thu, 13 Jun 2024 14:23:12 +0200 Subject: [PATCH 2/7] update docs for get_reader() --- alpharaw/ms_data_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alpharaw/ms_data_base.py b/alpharaw/ms_data_base.py index a7bcac5..edc777d 100644 --- a/alpharaw/ms_data_base.py +++ b/alpharaw/ms_data_base.py @@ -435,7 +435,7 @@ def get_reader( self, file_type: str, *, centroided: bool = True, **kwargs ) -> MSData_Base: """ - Get the MS reader. + Get the MS reader for the given `file_type`. Parameters ---------- From 6ca06c03fb665267b3fb9149e6b1987d0851f8b4 Mon Sep 17 00:00:00 2001 From: jalew188 Date: Thu, 13 Jun 2024 14:29:55 +0200 Subject: [PATCH 3/7] FIX by pre-commit run --all-files --- alpharaw/ms_data_base.py | 10 ++++++---- alpharaw/thermo.py | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/alpharaw/ms_data_base.py b/alpharaw/ms_data_base.py index edc777d..9d9238e 100644 --- a/alpharaw/ms_data_base.py +++ b/alpharaw/ms_data_base.py @@ -70,7 +70,7 @@ class MSData_Base: "TOF", ] """ - Some spectrum infomation in str format that can be mapped into unique token IDs. + Some spectrum infomation in str format that can be mapped into unique token IDs. Token IDs are better for storage in HDF5 format. """ @@ -86,7 +86,7 @@ def __init__(self, centroided: bool = True, save_as_hdf: bool = False, **kwargs) self.file_type = "" self.instrument = "none" - def _get_term_id(self, terminology: str)->int: + def _get_term_id(self, terminology: str) -> int: """ Get terminology ID from :attr:`.MSData_Base.vocab`, -1 if not exist. @@ -407,6 +407,7 @@ class MSData_HDF(MSData_Base): This class regiesters as "alpharaw", "raw.hdf", "alpharaw_hdf", "hdf" and "hdf5" in :data:`ms_reader_provider` instance. 
""" + def import_raw(self, _path: str): self.raw_file_path = _path self.load_hdf(_path) @@ -418,7 +419,7 @@ class MSReaderProvider: def __init__(self): self.ms_reader_dict = {} - def register_reader(self, ms2_type: str, reader_class:type): + def register_reader(self, ms2_type: str, reader_class: type): """ Register a new reader for `ms_type` format with `reader_class`. @@ -455,9 +456,10 @@ def get_reader( else: return self.ms_reader_dict[file_type](centroided=centroided, **kwargs) + ms_reader_provider = MSReaderProvider() """ -MS data register (:class:`.MSReaderProvider`) performs as a factory to +MS data register (:class:`.MSReaderProvider`) performs as a factory to produce different readers for different file formats. """ diff --git a/alpharaw/thermo.py b/alpharaw/thermo.py index 1dc9a04..f8ddadb 100644 --- a/alpharaw/thermo.py +++ b/alpharaw/thermo.py @@ -65,7 +65,7 @@ class ThermoRawData(MSData_Base): dda : bool, optional _description_, by default False. auxiliary_items : list, optional - Additional spectrum items, candidates are in :data:`__auxiliary_item_dtypes__`. + Additional spectrum items, candidates are in :data:`__auxiliary_item_dtypes__`. By default []. """ From 518d927001e0e34f318cef8a53a022b7b5e5fb7b Mon Sep 17 00:00:00 2001 From: jalew188 Date: Fri, 14 Jun 2024 12:55:55 +0200 Subject: [PATCH 4/7] FIX typos and unclear docs after review --- alpharaw/ms_data_base.py | 71 ++++++++++++++++++++-------------------- alpharaw/thermo.py | 2 +- 2 files changed, 37 insertions(+), 36 deletions(-) diff --git a/alpharaw/ms_data_base.py b/alpharaw/ms_data_base.py index 9d9238e..baac8f7 100644 --- a/alpharaw/ms_data_base.py +++ b/alpharaw/ms_data_base.py @@ -6,7 +6,7 @@ class MSData_Base: """ - The base data structure for MS Data, other MSData loader inherit this class. + The base data structure for MS RAW Data, other MSData loaders inherit this class. 
Parameters ---------- @@ -32,7 +32,7 @@ class MSData_Base: """ Spectrum dataframe containing the following columns: - - `rt` (float64): in minutes. `rt_sec` will be RT in seconds. + - `rt` (float64): in minutes. `rt_sec` will be RT in seconds, which is not included by default. - `precursor_mz` (float64): mono_mz (DDA) or isolation center mz - `isolation_lower_mz` (float64): left of the isolation window - `isolation_upper_mz` (float64): right of the isolation window @@ -70,7 +70,8 @@ class MSData_Base: "TOF", ] """ - Some spectrum infomation in str format that can be mapped into unique token IDs. + These spectrum infomation items in str format can be one-to-one mapped into + unique token IDs (indices), for exampel "CID"=0, "HCD"=1, ... Token IDs are better for storage in HDF5 format. """ @@ -93,12 +94,12 @@ def _get_term_id(self, terminology: str) -> int: Parameters ---------- terminology : str - The terminology name. + The terminology name from :attr:`.MSData_Base.vocab`, such as "CID", "HCD", ... Returns ------- int - Terminology ID, which is the index in :attr:`.MSData_Base.vocab` + Terminology ID, which is the index in :attr:`.MSData_Base.vocab`. """ try: return self.vocab.index(terminology) @@ -110,36 +111,36 @@ def raw_file_path(self) -> str: return self._raw_file_path @raw_file_path.setter - def raw_file_path(self, _path: str): - self._raw_file_path = _path + def raw_file_path(self, raw_file_path: str): + self._raw_file_path = raw_file_path - def import_raw(self, _path: str): + def import_raw(self, raw_file_path: str): """ Import a raw file. It involves three steps: ``` - raw_data_dict = self._import(_path) + raw_data_dict = self._import(raw_file_path) self._set_dataframes(raw_data_dict) self._check_df() ``` Parameters ---------- - _path : str - Raw file path + raw_file_path : str + Absolute or relative path of the raw file. 
""" - self.raw_file_path = _path - raw_data_dict = self._import(_path) + self.raw_file_path = raw_file_path + raw_data_dict = self._import(raw_file_path) self._set_dataframes(raw_data_dict) self._check_df() if self._save_as_hdf: - self.save_hdf(_path + ".hdf") + self.save_hdf(raw_file_path + ".hdf") - def load_raw(self, _path: str): + def load_raw(self, raw_file_path: str): """ Wrapper of :func:`.MSData_Base.import_raw` """ - self.import_raw(_path) + self.import_raw(raw_file_path) def _save_meta_to_hdf(self, hdf: HDF_File): hdf.ms_data.meta = { @@ -157,31 +158,31 @@ def _load_meta_from_hdf(self, hdf: HDF_File): self.centroided = hdf.ms_data.meta.centroided self.instrument = hdf.ms_data.meta.instrument - def save_hdf(self, _path: str): + def save_hdf(self, hdf_file_path: str): """ Save data into HDF5 file Parameters ---------- - _path : str - HDF5 file path + hdf_file_path : str + Absolute or relative path of HDF5 file. """ - hdf = HDF_File(_path, read_only=False, truncate=True, delete_existing=True) + hdf = HDF_File(hdf_file_path, read_only=False, truncate=True, delete_existing=True) hdf.ms_data = {"spectrum_df": self.spectrum_df, "peak_df": self.peak_df} self._save_meta_to_hdf(hdf) - def load_hdf(self, _path: str): + def load_hdf(self, hdf_file_path: str): """ Load data from HDF5 file. Parameters ---------- - _path : str - HDF5 file path. + hdf_file_path : str + Absolute or relative path of HDF5 file. """ - hdf = HDF_File(_path, read_only=True, truncate=False, delete_existing=False) + hdf = HDF_File(hdf_file_path, read_only=True, truncate=False, delete_existing=False) self.spectrum_df = hdf.ms_data.spectrum_df.values self.peak_df = hdf.ms_data.peak_df.values @@ -404,7 +405,7 @@ def index_ragged_list(ragged_list: list) -> np.ndarray: class MSData_HDF(MSData_Base): """ Wrapper of reader for alpharaw's HDF5 spectrum file. 
- This class regiesters as "alpharaw", "raw.hdf", "alpharaw_hdf", "hdf" and "hdf5" + This class is registered as "alpharaw", "raw.hdf", "alpharaw_hdf", "hdf" and "hdf5" in :data:`ms_reader_provider` instance. """ @@ -419,28 +420,28 @@ class MSReaderProvider: def __init__(self): self.ms_reader_dict = {} - def register_reader(self, ms2_type: str, reader_class: type): + def register_reader(self, ms_file_type: str, reader_class: type): """ - Register a new reader for `ms_type` format with `reader_class`. + Register a new reader for `ms_file_type` format with `reader_class`. Parameters ---------- - file_type : str + ms_file_type : str AlphaRaw supported MS file types. reader_class : type AlphaRaw supported MS class types. """ - self.ms_reader_dict[ms2_type.lower()] = reader_class + self.ms_reader_dict[ms_file_type.lower()] = reader_class def get_reader( - self, file_type: str, *, centroided: bool = True, **kwargs + self, ms_file_type: str, *, centroided: bool = True, **kwargs ) -> MSData_Base: """ - Get the MS reader for the given `file_type`. + Get the MS reader for the given `ms_file_type`. Parameters ---------- - file_type : str + ms_file_type : str AlphaRaw supported MS file types. centroided : bool, optional If centroiding the data, by default True. @@ -450,11 +451,11 @@ def get_reader( MSData_Base Instance of corresponding sub-class of `MSData_Base`. """ - file_type = file_type.lower() - if file_type not in self.ms_reader_dict: + ms_file_type = ms_file_type.lower() + if ms_file_type not in self.ms_reader_dict: return None else: - return self.ms_reader_dict[file_type](centroided=centroided, **kwargs) + return self.ms_reader_dict[ms_file_type](centroided=centroided, **kwargs) ms_reader_provider = MSReaderProvider() diff --git a/alpharaw/thermo.py b/alpharaw/thermo.py index f8ddadb..73df14a 100644 --- a/alpharaw/thermo.py +++ b/alpharaw/thermo.py @@ -50,7 +50,7 @@ class ThermoRawData(MSData_Base): """ Loading Thermo Raw data as MSData_Base data structure. 
- Register "thermo" and "thermo_raw" in :data:`ms_reader_provider`. + This class is registered as "thermo" and "thermo_raw" in :data:`ms_reader_provider`. Parameters ---------- From 46f48d011ec689acf93b47cf0164d8d5bfd99f64 Mon Sep 17 00:00:00 2001 From: jalew188 Date: Fri, 14 Jun 2024 12:57:15 +0200 Subject: [PATCH 5/7] FIX pre-commit run --all-files --- alpharaw/ms_data_base.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/alpharaw/ms_data_base.py b/alpharaw/ms_data_base.py index baac8f7..4fc480a 100644 --- a/alpharaw/ms_data_base.py +++ b/alpharaw/ms_data_base.py @@ -70,7 +70,7 @@ class MSData_Base: "TOF", ] """ - These spectrum infomation items in str format can be one-to-one mapped into + These spectrum information items in str format can be one-to-one mapped into unique token IDs (indices), for exampel "CID"=0, "HCD"=1, ... Token IDs are better for storage in HDF5 format. """ @@ -167,7 +167,9 @@ def save_hdf(self, hdf_file_path: str): hdf_file_path : str Absolute or relative path of HDF5 file. """ - hdf = HDF_File(hdf_file_path, read_only=False, truncate=True, delete_existing=True) + hdf = HDF_File( + hdf_file_path, read_only=False, truncate=True, delete_existing=True + ) hdf.ms_data = {"spectrum_df": self.spectrum_df, "peak_df": self.peak_df} @@ -182,7 +184,9 @@ def load_hdf(self, hdf_file_path: str): hdf_file_path : str Absolute or relative path of HDF5 file. 
""" - hdf = HDF_File(hdf_file_path, read_only=True, truncate=False, delete_existing=False) + hdf = HDF_File( + hdf_file_path, read_only=True, truncate=False, delete_existing=False + ) self.spectrum_df = hdf.ms_data.spectrum_df.values self.peak_df = hdf.ms_data.peak_df.values From a54c1398f4b07a3a326c73695ec8d1a771b8f92d Mon Sep 17 00:00:00 2001 From: jalew188 Date: Fri, 14 Jun 2024 16:44:52 +0200 Subject: [PATCH 6/7] #53 update more docs --- alpharaw/ms_data_base.py | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/alpharaw/ms_data_base.py b/alpharaw/ms_data_base.py index 4fc480a..e5ad4c5 100644 --- a/alpharaw/ms_data_base.py +++ b/alpharaw/ms_data_base.py @@ -201,7 +201,39 @@ def reset_spec_idxes(self): self.spectrum_df.reset_index(drop=True, inplace=True) self.spectrum_df["spec_idx"] = self.spectrum_df.index.values - def _import(self, _path): + def _import(self, _path: str) -> dict: + """ + _summary_ + + Parameters + ---------- + _path : str + _description_ + + Returns + ------- + dict + Example: + ``` + spec_dict = { + "_peak_indices": _peak_indices, + "peak_mz": np.concatenate(mz_values).copy(), + "peak_intensity": np.concatenate(intensity_values).copy(), + "rt": np.array(rt_values).copy(), + "precursor_mz": np.array(precursor_mz_values).copy(), + "precursor_charge": np.array(precursor_charges, dtype=np.int8).copy(), + "isolation_lower_mz": np.array(isolation_mz_lowers).copy(), + "isolation_upper_mz": np.array(isolation_mz_uppers).copy(), + "ms_level": np.array(ms_order_list, dtype=np.int8).copy(), + "nce": np.array(ce_list, dtype=np.float32).copy(), + } + ``` + + Raises + ------ + NotImplementedError + Sub-class of `MSData_Base` must implement this method. 
+ """ raise NotImplementedError(f"{self.__class__} must implement `_import()`") def _set_dataframes(self, raw_data: dict): From 6fbfb40a571619a7ff8b0263c965c94551abdaab Mon Sep 17 00:00:00 2001 From: jalew188 Date: Fri, 14 Jun 2024 16:46:08 +0200 Subject: [PATCH 7/7] #53 update missing docs --- alpharaw/ms_data_base.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/alpharaw/ms_data_base.py b/alpharaw/ms_data_base.py index e5ad4c5..03e7ce6 100644 --- a/alpharaw/ms_data_base.py +++ b/alpharaw/ms_data_base.py @@ -203,12 +203,10 @@ def reset_spec_idxes(self): def _import(self, _path: str) -> dict: """ - _summary_ - Parameters ---------- _path : str - _description_ + Path of raw file. Returns -------