From 3b3fdd2679f0d772ea02ec1e166cd06d6e87cfc5 Mon Sep 17 00:00:00 2001
From: Anisha Keshavan
Date: Mon, 15 Apr 2019 15:09:01 -0700
Subject: [PATCH 1/3] enh/wip: added entrypoint to the non-topup option

---
 dmriprep/cli.py   | 30 +++++++++++++++++++++++++-----
 dmriprep/io.py    |  8 ++++----
 dmriprep/run.py   |  2 +-
 dmriprep/utils.py |  6 ++++++
 4 files changed, 36 insertions(+), 10 deletions(-)

diff --git a/dmriprep/cli.py b/dmriprep/cli.py
index 8b45d7e..6a760bd 100644
--- a/dmriprep/cli.py
+++ b/dmriprep/cli.py
@@ -10,6 +10,7 @@
 from . import io
 from . import run
 from .data import get_dataset
+from .utils import is_bval_bvec_hemispherical
 
 # Filter warnings that are visible whenever you import another package that
 # was compiled against an older numpy than is installed.
@@ -69,11 +70,30 @@ def main(participant_label, bids_dir, output_dir,
 
     inputs = io.get_bids_files(participant_label, bids_dir)
 
     for subject_inputs in inputs:
-        run.run_dmriprep_pe(**subject_inputs,
-                            working_dir=os.path.join(output_dir, 'scratch'),
-                            out_dir=output_dir,
-                            eddy_niter=eddy_niter,
-                            slice_outlier_threshold=slice_outlier_threshold)
+        # decide which workflow to run depending on inputs
+        if (subject_inputs['dwi_file_AP'] and subject_inputs['dwi_file_PA']):
+            # run the pe dmriprep workflow
+            run.run_dmriprep_pe(**subject_inputs,
+                                working_dir=os.path.join(output_dir, 'scratch'),
+                                out_dir=output_dir,
+                                eddy_niter=eddy_niter,
+                                slice_outlier_threshold=slice_outlier_threshold)
+        else:
+            print('''
+            WARNING: no AP/PA files found, so we won't use TOPUP. If this isn't right, check that
+            your files follow the BIDS format.
+            ''')
+            # check hemispheres, to see whether we run eddy or not.
+            # if we don't run eddy, we use run.run_dmriprep()
+            is_hemi = is_bval_bvec_hemispherical(subject_inputs['bval_file'], subject_inputs['bvec_file'])
+            if is_hemi:
+                # run the dmriprep without topup workflow
+                raise NotImplementedError('we still need to unwire TOPUP from our workflow')
+            else:
+                run.run_dmriprep(**subject_inputs,
+                                 working_dir=os.path.join(output_dir, 'scratch'),
+                                 out_dir=output_dir,)
 
     return 0
diff --git a/dmriprep/io.py b/dmriprep/io.py
index a154387..182579b 100644
--- a/dmriprep/io.py
+++ b/dmriprep/io.py
@@ -26,14 +26,14 @@ def get_bids_subject_input_files(subject_id, bids_input_directory):
                          suffix='epi',
                          dir='AP',
                          extensions=['.nii', '.nii.gz'])
-    assert len(ap_file) == 1, 'found {} ap fieldmap files and we need just 1'.format(len(ap_file))
+    # assert len(ap_file) == 1, 'found {} ap fieldmap files and we need just 1'.format(len(ap_file))
 
     pa_file = layout.get(subject=subject_id,
                         datatype='fmap',
                         suffix='epi',
                         dir='PA',
                         extensions=['.nii', '.nii.gz'])
-    assert len(pa_file) == 1, 'found {} pa fieldmap files and we need just 1'.format(len(pa_file))
+    # assert len(pa_file) == 1, 'found {} pa fieldmap files and we need just 1'.format(len(pa_file))
 
     dwi_files = layout.get(subject=subject_id, datatype='dwi', suffix='dwi')
     valid_dwi_files = []
@@ -60,8 +60,8 @@
     outputs = dict(subject_id="sub-"+subject_id,
                    dwi_file=dwi_file[0],
-                   dwi_file_AP=ap_file[0].path,
-                   dwi_file_PA=pa_file[0].path,
+                   dwi_file_AP=ap_file[0].path if len(ap_file) and len(pa_file) else None,
+                   dwi_file_PA=pa_file[0].path if len(ap_file) and len(pa_file) else None,
                    bvec_file=bvec_file[0],
                    bval_file=bval_file[0],
                    subjects_dir=op.abspath(subjects_dir))
diff --git a/dmriprep/run.py b/dmriprep/run.py
index 9ac70d8..4ab0f5a 100644
--- a/dmriprep/run.py
+++ b/dmriprep/run.py
@@ -4,7 +4,7 @@
 def run_dmriprep(dwi_file, bvec_file, bval_file,
-                 subjects_dir, working_dir, out_dir):
+                 subjects_dir, working_dir, out_dir, **kwargs):
     """
     Runs dmriprep for acquisitions with just one PE direction.
diff --git a/dmriprep/utils.py b/dmriprep/utils.py
index cd92cc1..1ee4927 100644
--- a/dmriprep/utils.py
+++ b/dmriprep/utils.py
@@ -5,6 +5,8 @@
 import itertools
 
 import numpy as np
+import logging
+from dipy.io import read_bvals_bvecs
 
 mod_logger = logging.getLogger(__name__)
 
@@ -64,3 +66,7 @@ def is_hemispherical(vecs):
     else:
         pole = np.array([0.0, 0.0, 0.0])
     return is_hemi, pole
+
+def is_bval_bvec_hemispherical(bval_file, bvec_file):
+    _, bvecs = read_bvals_bvecs(bval_file, bvec_file)
+    return is_hemispherical(bvecs[1:])
\ No newline at end of file

From b84a8e7f82206346de13df5fdd2a47c50ca1659f Mon Sep 17 00:00:00 2001
From: Ariel Rokem
Date: Mon, 15 Apr 2019 15:54:39 -0700
Subject: [PATCH 2/3] Fix up a few things related to is_hemispherical.

---
 dmriprep/utils.py   | 56 ++++++++++++++++++++++++++++++++++++++++-----
 tests/test_utils.py | 11 ++++++++-
 2 files changed, 60 insertions(+), 7 deletions(-)

diff --git a/dmriprep/utils.py b/dmriprep/utils.py
index 1ee4927..9096374 100644
--- a/dmriprep/utils.py
+++ b/dmriprep/utils.py
@@ -6,13 +6,13 @@
 
 import numpy as np
 import logging
-from dipy.io import read_bvals_bvecs
+from dipy.core.gradients import gradient_table
 
 mod_logger = logging.getLogger(__name__)
 
 
-def is_hemispherical(vecs):
+def is_hemispherical(vecs, atol=10e-4):
     """Test whether all points on a unit sphere lie in the same hemisphere.
 
     Parameters
@@ -21,6 +21,10 @@
         2D numpy array with shape (N, 3) where N is the number of points.
         All points must lie on the unit sphere.
 
+    atol : float, optional
+        Tolerance used to verify that the input vectors are unit
+        length.
+
     Returns
     -------
     is_hemi : bool
@@ -37,7 +41,7 @@
     """
     if vecs.shape[1] != 3:
         raise ValueError("Input vectors must be 3D vectors")
-    if not np.allclose(1, np.linalg.norm(vecs, axis=1)):
+    if not np.allclose(1, np.linalg.norm(vecs, axis=1), atol=atol):
         raise ValueError("Input vectors must be unit vectors")
 
     # Generate all pairwise cross products
@@ -67,6 +71,46 @@
         pole = np.array([0.0, 0.0, 0.0])
     return is_hemi, pole
 
-def is_bval_bvec_hemispherical(bval_file, bvec_file):
-    _, bvecs = read_bvals_bvecs(bval_file, bvec_file)
-    return is_hemispherical(bvecs[1:])
\ No newline at end of file
+
+def is_bval_bvec_hemispherical(gtab=None, bval_file=None, bvec_file=None,
+                               atol=10e-4):
+    """
+    Test whether all points on a unit sphere lie in the same hemisphere
+    for a DIPY GradientTable object or bval and bvec files.
+
+    Parameters
+    ----------
+    gtab : a DIPY GradientTable object, optional
+
+    bval_file : str, optional
+        Full path to a file with b-values.
+
+    bvec_file : str, optional
+        Full path to a file with b-vectors.
+
+    atol : float
+
+    Returns
+    -------
+    is_hemi : bool
+        If True, one can find a hemisphere that contains all the points.
+        If False, then the points do not lie in any hemisphere.
+
+    pole : numpy.ndarray
+        If `is_hemi == True`, then pole is the "central" pole of the
+        input vectors. Otherwise, pole is the zero vector.
+    """
+    if gtab is None:
+        if bval_file is None or bvec_file is None:
+            raise ValueError("`is_bval_bvec_hemispherical` requires "
+                             "either a GradientTable object or full paths "
+                             "as input")
+        gtab = gradient_table(bval_file, bvec_file)
+
+    else:
+        if bval_file is not None or bvec_file is not None:
+            raise ValueError("`is_bval_bvec_hemispherical` takes either "
+                             "a GradientTable object or full paths, but not "
+                             "both")
+
+    return is_hemispherical(gtab.bvecs[~gtab.b0s_mask], atol=atol)
diff --git a/tests/test_utils.py b/tests/test_utils.py
index f5fa1b1..905df11 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -5,7 +5,8 @@
 
 import numpy as np
 
-from dmriprep.utils import is_hemispherical
+from dmriprep.utils import is_hemispherical, is_bval_bvec_hemispherical
+import dipy.data as dpd
 
 
 def uniform_points_on_sphere(npoints=1, hemisphere=True, rotate=(0, 0, 0)):
@@ -58,3 +59,11 @@ def test_is_hemispherical():
     assert is_hemispherical(vecs)[0]
     vecs = uniform_points_on_sphere(npoints=100, hemisphere=False)
     assert not is_hemispherical(vecs)[0]
+
+
+def test_is_bval_bvec_hemispherical():
+    data_fname, bval_fname, bvec_fname = dpd.get_fnames()
+    is_it = is_bval_bvec_hemispherical(bval_file=bval_fname,
+                                       bvec_file=bvec_fname)
+    # It should be!
+    assert is_it

From dd3b321a942a3994e886c3834b6a48e7c074990e Mon Sep 17 00:00:00 2001
From: Anisha Keshavan
Date: Mon, 15 Apr 2019 16:07:21 -0700
Subject: [PATCH 3/3] wip: removed TOPUP, but we are not sure if we have defined the eddy inputs correctly

---
 dmriprep/cli.py |   6 ++-
 dmriprep/run.py | 117 +++++++++++++++++++++++++++++++++++++++++-------
 2 files changed, 107 insertions(+), 16 deletions(-)

diff --git a/dmriprep/cli.py b/dmriprep/cli.py
index 6a760bd..5a052c8 100644
--- a/dmriprep/cli.py
+++ b/dmriprep/cli.py
@@ -88,7 +88,11 @@ def main(participant_label, bids_dir, output_dir,
             is_hemi = is_bval_bvec_hemispherical(subject_inputs['bval_file'], subject_inputs['bvec_file'])
             if is_hemi:
                 # run the dmriprep without topup workflow
-                raise NotImplementedError('we still need to unwire TOPUP from our workflow')
+                run.run_dmriprep_no_pe_with_eddy(**subject_inputs,
+                                                 working_dir=os.path.join(output_dir, 'scratch'),
+                                                 out_dir=output_dir,
+                                                 eddy_niter=eddy_niter,
+                                                 slice_outlier_threshold=slice_outlier_threshold)
             else:
                 run.run_dmriprep(**subject_inputs,
                                  working_dir=os.path.join(output_dir, 'scratch'),
diff --git a/dmriprep/run.py b/dmriprep/run.py
index 4ab0f5a..0816a99 100644
--- a/dmriprep/run.py
+++ b/dmriprep/run.py
@@ -284,7 +284,7 @@
     --------
     dmriprep.run.get_dmriprep_pe_workflow
     """
-    wf = get_dmriprep_pe_workflow()
+    wf = get_dmriprep_base_workflow()
     wf.base_dir = op.join(op.abspath(working_dir), subject_id)
 
     inputspec = wf.get_node('inputspec')
@@ -306,7 +306,76 @@ def run_dmriprep_pe(subject_id, dwi_file, dwi_file_AP, dwi_file_PA,
     wf.run()
 
 
+def run_dmriprep_no_pe_with_eddy(subject_id, dwi_file,
+                                 bvec_file, bval_file,
+                                 subjects_dir, working_dir, out_dir,
+                                 eddy_niter=5, slice_outlier_threshold=0.02, **kwargs):
+    """Run the dmriprep nipype workflow with eddy but without TOPUP
+
+    Parameters
+    ----------
+    subject_id : str
+        Subject identifier
+
+    dwi_file : str
+        Path to dwi nifti file
+
+    bvec_file : str
+        Path to bvec file
+
+    bval_file : str
+        Path to bval file
+
+    subjects_dir : str
+        Path to subject's freesurfer directory
+
+    working_dir : str
+        Path to workflow working directory
+
+    out_dir : str
+        Path to output directory
+
+    eddy_niter : int, default=5
+        Fixed number of eddy iterations. See
+        https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/eddy/UsersGuide#A--niter
+
+    slice_outlier_threshold: int or float
+        Number of allowed outlier slices per volume. If this is exceeded the
+        volume is dropped from analysis. If `slice_outlier_threshold` is an
+        int, it is treated as number of allowed outlier slices. If
+        `slice_outlier_threshold` is a float between 0 and 1 (exclusive), it is
+        treated as the fraction of allowed outlier slices.
+
+    Notes
+    -----
+    This workflow assumes a single phase-encode direction, so TOPUP-based
+    distortion correction is skipped.
+
+    See Also
+    --------
+    dmriprep.run.get_dmriprep_base_workflow
+    """
+    wf = get_dmriprep_base_workflow(use_reverse_phase_encode=False)
+    wf.base_dir = op.join(op.abspath(working_dir), subject_id)
+
+    inputspec = wf.get_node('inputspec')
+    inputspec.inputs.subject_id = subject_id
+    inputspec.inputs.dwi_file = dwi_file
+    inputspec.inputs.bvec_file = bvec_file
+    inputspec.inputs.bval_file = bval_file
+    inputspec.inputs.subjects_dir = subjects_dir
+    inputspec.inputs.out_dir = op.abspath(out_dir)
+    inputspec.inputs.eddy_niter = eddy_niter
+    inputspec.inputs.slice_outlier_threshold = slice_outlier_threshold
+
+    # write the graph (this is saved to the working dir)
+    wf.write_graph()
+    wf.config['execution']['remove_unnecessary_outputs'] = False
+    wf.config['execution']['keep_inputs'] = True
+    wf.run()
+
+
-def get_dmriprep_pe_workflow():
+def get_dmriprep_base_workflow(use_reverse_phase_encode=True):
     """Return the dmriprep (phase encoded) nipype workflow
 
     Parameters
@@ -349,6 +418,12 @@
     epi_pa = {'echospacing': 66.5e-3, 'enc_dir': 'y'}
     prep = all_fsl_pipeline(epi_params=epi_ap, altepi_params=epi_pa)
 
+    if not use_reverse_phase_encode:
+        peb_node = prep.get_node('peb_correction')
+        prep.remove_nodes([peb_node])
+
+
+
     # initialize an overall workflow
     wf = pe.Workflow(name="dmriprep")
 
@@ -361,6 +436,13 @@
     eddy.inputs.repol = True
     eddy.inputs.cnr_maps = True
     eddy.inputs.residuals = True
+
+    if not use_reverse_phase_encode:
+        acqp_single_file = '/tmp/acqp.txt'
+        with open(acqp_single_file, 'w') as f:
+            f.write('0 1 0 .0665')
+        eddy.inputs.in_acqp = acqp_single_file
+
     import multiprocessing
     eddy.inputs.num_threads = multiprocessing.cpu_count()
     import numba.cuda
@@ -370,11 +452,12 @@
     except:
         eddy.inputs.use_cuda = False
 
-    topup = prep.get_node('peb_correction.topup')
-    topup.inputs.checksize = True
+    if use_reverse_phase_encode:
+        topup = prep.get_node('peb_correction.topup')
+        topup.inputs.checksize = True
 
-    applytopup = prep.get_node('peb_correction.unwarp')
-    applytopup.inputs.checksize = True
+        applytopup = prep.get_node('peb_correction.unwarp')
+        applytopup.inputs.checksize = True
 
     eddy.inputs.checksize = True
 
@@ -384,10 +467,12 @@
     wf.connect(prep, ('fsl_eddy.out_corrected', get_path), eddy_quad, 'base_name')
     wf.connect(inputspec, 'bval_file', eddy_quad, 'bval_file')
    wf.connect(prep, 'Rotate_Bvec.out_file', eddy_quad, 'bvec_file')
-    wf.connect(prep, 'peb_correction.topup.out_field', eddy_quad, 'field')
-    wf.connect(prep, 'gen_index.out_file', eddy_quad, 'idx_file')
-    wf.connect(prep, 'peb_correction.topup.out_enc_file', eddy_quad, 'param_file')
     wf.connect(prep, ('fsl_eddy.out_corrected', get_qc_path), eddy_quad, 'output_dir')
+    wf.connect(prep, 'gen_index.out_file', eddy_quad, 'idx_file')
+
+    if use_reverse_phase_encode:
+        wf.connect(prep, 'peb_correction.topup.out_enc_file', eddy_quad, 'param_file')
+        wf.connect(prep, 'peb_correction.topup.out_field', eddy_quad, 'field')
 
     # need a mask file for eddy_quad. lets get it from the B0.
     def get_b0_mask_fn(b0_file):
@@ -478,13 +563,15 @@ def num_outliers(scan, outliers):
     wf.connect(prep, "fsl_eddy.out_outlier_report",
                id_outliers_node, "outlier_report")
 
-    list_merge = pe.Node(niu.Merge(numinputs=2), name="list_merge")
-    wf.connect(inputspec, 'dwi_file_ap', list_merge, 'in1')
-    wf.connect(inputspec, 'dwi_file_pa', list_merge, 'in2')
 
-    merge = pe.Node(fsl.Merge(dimension='t'), name="mergeAPPA")
-    wf.connect(merge, 'merged_file', prep, 'inputnode.alt_file')
-    wf.connect(list_merge, 'out', merge, 'in_files')
+    if use_reverse_phase_encode:
+        list_merge = pe.Node(niu.Merge(numinputs=2), name="list_merge")
+        wf.connect(inputspec, 'dwi_file_ap', list_merge, 'in1')
+        wf.connect(inputspec, 'dwi_file_pa', list_merge, 'in2')
+
+        merge = pe.Node(fsl.Merge(dimension='t'), name="mergeAPPA")
+        wf.connect(merge, 'merged_file', prep, 'inputnode.alt_file')
+        wf.connect(list_merge, 'out', merge, 'in_files')
 
     fslroi = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name="fslroi")
     wf.connect(prep, "outputnode.out_file", fslroi, "in_file")
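
A minimal usage sketch of the hemisphere check that these patches wire into the CLI. It is illustrative only and not part of the patch series: it assumes the patched dmriprep.utils and numpy are importable, and it uses synthetic unit vectors instead of real bval/bvec files.

# Illustrative sketch: the check cli.py uses to decide between the
# eddy-only workflow and plain run.run_dmriprep().
import numpy as np

from dmriprep.utils import is_hemispherical

# 50 synthetic unit vectors confined to the z >= 0 hemisphere.
rng = np.random.RandomState(0)
vecs = rng.randn(50, 3)
vecs[:, 2] = np.abs(vecs[:, 2])
vecs /= np.linalg.norm(vecs, axis=1, keepdims=True)

is_hemi, pole = is_hemispherical(vecs)
print(is_hemi)  # True: some hemisphere contains every vector
print(pole)     # the "central" pole of that hemisphere

# Vectors spread over the whole sphere are not confined to one hemisphere,
# so the check fails and the CLI would fall back to run.run_dmriprep().
full_sphere = rng.randn(50, 3)
full_sphere /= np.linalg.norm(full_sphere, axis=1, keepdims=True)
print(is_hemispherical(full_sphere)[0])  # False for this draw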