3
3
from logging .handlers import RotatingFileHandler
4
4
from pathlib import Path
5
5
6
+ import pandas as pd
7
+
6
8
from pydicer .config import PyDicerConfig
7
9
from pydicer .constants import CONVERTED_DIR_NAME , PYDICER_DIR_NAME
8
10
14
16
from pydicer .dataset .preparation import PrepareDataset
15
17
from pydicer .analyse .data import AnalyseData
16
18
19
+ from pydicer .utils import read_converted_data , add_structure_name_mapping , copy_doc
20
+
21
+ from pydicer .generate .object import add_object , add_structure_object , add_dose_object
22
+ from pydicer .generate .segmentation import (
23
+ read_all_segmentation_logs ,
24
+ segment_image ,
25
+ segment_dataset ,
26
+ )
27
+
17
28
# Module-wide logger; getLogger() with no name returns the root logger, so
# handlers attached here affect all pydicer logging output.
logger = logging.getLogger()
18
29
19
30
20
31
class PyDicer :
32
+ """The PyDicer class provides easy access to all the key PyDicer functionality.
33
+
34
+ Args:
35
+ working_directory (str|pathlib.Path, optional): Directory in which data is stored. Defaults
36
+ to ".".
37
+
38
+ :ivar convert: Instance of :class:`~pydicer.convert.data.ConvertData`
39
+ :ivar visualise: Instance of :class:`~pydicer.visualise.data.VisualiseData`
40
+ :ivar dataset: Instance of :class:`~pydicer.dataset.preparation.PrepareDataset`
41
+ :ivar analyse: Instance of :class:`~pydicer.analyse.data.AnalyseData`
42
+ """
43
+
21
44
def __init__ (self , working_directory = "." ):
22
45
23
46
self .working_directory = Path (working_directory )
@@ -135,8 +158,8 @@ def preprocess(self, force=True):
135
158
if len (self .dicom_directories ) == 0 :
136
159
raise ValueError ("No DICOM input locations set. Add one using the add_input function." )
137
160
138
- pd = PreprocessData (self .working_directory )
139
- pd .preprocess (self .dicom_directories , force = force )
161
+ preprocess_data = PreprocessData (self .working_directory )
162
+ preprocess_data .preprocess (self .dicom_directories , force = force )
140
163
141
164
self .preprocessed_data = read_preprocessed_data (self .working_directory )
142
165
@@ -161,33 +184,60 @@ def run_pipeline(self, patient=None, force=True):
161
184
)
162
185
self .analyse .compute_dvh (dataset_name = CONVERTED_DIR_NAME , patient = patient , force = force )
163
186
164
- # Object generation (insert in dataset(s) or all data)
165
- def add_object_to_dataset (
166
- self ,
167
- uid ,
168
- patient_id ,
169
- obj_type ,
170
- modality ,
171
- for_uid = None ,
172
- referenced_sop_instance_uid = None ,
173
- datasets = None ,
174
- ):
175
- """_summary_
187
+ @copy_doc (add_structure_name_mapping , remove_args = ["working_directory" ])
188
+ def add_structure_name_mapping ( # pylint: disable=missing-function-docstring
189
+ self , * args , ** kwargs
190
+ ) -> pd .DataFrame :
176
191
177
- Args:
178
- uid (_type_): _description_
179
- patient_id (_type_): _description_
180
- obj_type (_type_): _description_
181
- modality (_type_): _description_
182
- for_uid (_type_, optional): _description_. Defaults to None.
183
- referenced_sop_instance_uid (_type_, optional): _description_. Defaults to None.
184
- datasets (_type_, optional): _description_. Defaults to None.
185
- """
192
+ return add_structure_name_mapping (
193
+ * args , working_directory = self .working_directory , ** kwargs
194
+ )
195
+
196
+ @copy_doc (read_converted_data , remove_args = ["working_directory" ])
197
+ def read_converted_data ( # pylint: disable=missing-function-docstring
198
+ self , * _ , ** kwargs
199
+ ) -> pd .DataFrame :
200
+
201
+ return read_converted_data (working_directory = self .working_directory , ** kwargs )
202
+
203
+ @copy_doc (add_object , remove_args = ["working_directory" ])
204
+ def add_object ( # pylint: disable=missing-function-docstring
205
+ self , * args , ** kwargs
206
+ ) -> pd .DataFrame :
207
+
208
+ return add_object (self .working_directory , * args , ** kwargs )
209
+
210
+ @copy_doc (add_structure_object , remove_args = ["working_directory" ])
211
+ def add_structure_object ( # pylint: disable=missing-function-docstring
212
+ self , * args , ** kwargs
213
+ ) -> pd .DataFrame :
214
+
215
+ return add_structure_object (self .working_directory , * args , ** kwargs )
216
+
217
+ @copy_doc (add_dose_object , remove_args = ["working_directory" ])
218
+ def add_dose_object ( # pylint: disable=missing-function-docstring
219
+ self , * args , ** kwargs
220
+ ) -> pd .DataFrame :
221
+
222
+ return add_dose_object (self .working_directory , * args , ** kwargs )
223
+
224
+ @copy_doc (read_all_segmentation_logs , remove_args = ["working_directory" ])
225
+ def read_all_segmentation_logs ( # pylint: disable=missing-function-docstring
226
+ self , * args , ** kwargs
227
+ ) -> pd .DataFrame :
228
+
229
+ return read_all_segmentation_logs (self .working_directory , * args , ** kwargs )
186
230
187
- # Check that object folder exists, if not provide instructions for adding
231
+ @copy_doc (segment_image , remove_args = ["working_directory" ])
232
+ def segment_image ( # pylint: disable=missing-function-docstring
233
+ self , * args , ** kwargs
234
+ ) -> pd .DataFrame :
188
235
189
- # Check that no object with uid already exists
236
+ return segment_image ( self . working_directory , * args , ** kwargs )
190
237
191
- # Check that references sop uid exists, only warning if not
238
+ @copy_doc (segment_dataset , remove_args = ["working_directory" ])
239
+ def segment_dataset ( # pylint: disable=missing-function-docstring
240
+ self , * args , ** kwargs
241
+ ) -> pd .DataFrame :
192
242
193
- # Once ready, add to converted.csv for each dataset specified
243
+ return segment_dataset ( self . working_directory , * args , ** kwargs )
0 commit comments