Commit 5489c55
download resources on the fly instead of github
- HIPPUNFOLD_CACHE_DIR now has model, atlas, and template subdirs
akhanf committed Feb 15, 2024
1 parent e7ca2b2 commit 5489c55
Showing 12 changed files with 195 additions and 212 deletions.
56 changes: 29 additions & 27 deletions hippunfold/config/snakebids.yml
@@ -338,42 +338,39 @@ singularity:
   autotop: 'docker://khanlab/hippunfold_deps:v0.5.0'

 xfm_identity: resources/identity_xfm.txt
-template: CITI168

 template_files:
   CITI168:
-    T1w: resources/CITI168/T1w_head_700um.nii.gz
-    T2w: resources/CITI168/T2w_head_700um.nii.gz
-    xfm_corobl: resources/CITI168/CoronalOblique_rigid.txt
-    crop_ref: resources/CITI168/T2w_300umCoronalOblique_hemi-{hemi}.nii.gz
-    crop_refT1w: resources/CITI168/T1w_300umCoronalOblique_hemi-{hemi}.nii.gz
-    Mask_crop: resources/CITI168/Mask_300umCoronalOblique_hemi-{hemi}.nii.gz
+    T1w: T1w_head_700um.nii.gz
+    T2w: T2w_head_700um.nii.gz
+    xfm_corobl: CoronalOblique_rigid.txt
+    crop_ref: T2w_300umCoronalOblique_hemi-{hemi}.nii.gz
+    crop_refT1w: T1w_300umCoronalOblique_hemi-{hemi}.nii.gz
+    Mask_crop: Mask_300umCoronalOblique_hemi-{hemi}.nii.gz
   dHCP:
-    T1w: resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T1w.nii.gz
-    T2w: resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T2w.nii.gz
-    xfm_corobl: resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_to-corobl_affine.txt
-    crop_ref: resources/CITI168/T2w_300umCoronalOblique_hemi-{hemi}.nii.gz
-    crop_refT1w: resources/CITI168/T1w_300umCoronalOblique_hemi-{hemi}.nii.gz
-    Mask_crop: resources/CITI168/Mask_300umCoronalOblique_hemi-{hemi}.nii.gz
-
-atlas:
-  - multihist7
+    T1w: tpl-dHCP_cohort-1_res-1_T1w.nii.gz
+    T2w: tpl-dHCP_cohort-1_res-1_T2w.nii.gz
+    xfm_corobl: tpl-dHCP_cohort-1_to-corobl_affine.txt
+    crop_ref: tpl-dHCP_cohort-1_res-1_space-corobl_hemi-{hemi}_T2w.nii.gz
+    crop_refT1w: tpl-dHCP_cohort-1_res-1_space-corobl_hemi-{hemi}_T1w.nii.gz
+    Mask_crop: tpl-dHCP_cohort-1_res-1_space-corobl_hemi-{hemi}_desc-hipp_mask.nii.gz
+
 atlas_files:
   multihist7:
-    label_nii: resources/multihist7/sub-maxprob_label-hipp_desc-manualsubfieldsunfoldaligned_dseg.nii.gz
-    label_list: resources/multihist7/labellist.txt
-    thick: resources/multihist7/thickness.nii.gz
-    curv: resources/multihist7/curvature.nii.gz
-    gyr: resources/multihist7/gyrification.nii.gz
+    label_nii: sub-maxprob_label-hipp_desc-manualsubfieldsunfoldaligned_dseg.nii.gz
+    label_list: labellist.txt
+    thickness: thickness.nii.gz
+    curvature: curvature.nii.gz
+    gyrification: gyrification.nii.gz
   bigbrain:
-    label_nii: resources/bigbrain/sub-bigbrain_hemi-{hemi}_label-hipp_desc-manualsubfields_dseg.nii.gz
-    label_list: resources/bigbrain/sub-bigbrain_labellist.txt
+    label_nii: sub-bigbrain_hemi-{hemi}_label-hipp_desc-manualsubfields_dseg.nii.gz
+    label_list: sub-bigbrain_labellist.txt
   magdeburg:
-    label_nii: resources/magdeburg/sub-all_hemi-{hemi}_label-hipp_desc-manualsubfields_maxprob.nii.gz
-    label_list: resources/magdeburg/magdeburg_labellist.txt
+    label_nii: sub-all_hemi-{hemi}_label-hipp_desc-manualsubfields_maxprob.nii.gz
+    label_list: magdeburg_labellist.txt
   freesurfer:
-    label_nii: resources/freesurfer/sub-all_hemi-{hemi}_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz
-    label_list: resources/freesurfer/freesurfer_labellist.txt
+    label_nii: sub-all_hemi-{hemi}_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz
+    label_list: freesurfer_labellist.txt


 #values to use for re-mapping tissue labels when combining with subfields.
@@ -407,6 +404,11 @@ no_reg_template: False

 modality: T2w

+template: CITI168
+
+atlas:
+  - multihist7
+


 #these will be downloaded to ~/.cache/hippunfold
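The upshot of this config change: template_files and atlas_files entries are now bare filenames, resolved at runtime against the matching subdirectory of the download cache rather than against paths bundled in the repository. A minimal sketch of that resolution (cache layout taken from the new download rules below; the hemi value is illustrative):

    from pathlib import Path

    download_dir = Path.home() / ".cache" / "hippunfold"  # or $HIPPUNFOLD_CACHE_DIR

    # a template file, e.g. the CITI168 coronal-oblique crop reference
    crop_ref = (
        download_dir / "template" / "CITI168" / "T2w_300umCoronalOblique_hemi-L.nii.gz"
    )

    # an atlas file, e.g. the multihist7 label list
    label_list = download_dir / "atlas" / "multihist7" / "labellist.txt"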
1 change: 1 addition & 0 deletions hippunfold/workflow/Snakefile
@@ -110,6 +110,7 @@ work = os.path.join(config["root"], "work")


 include: "rules/common.smk"
+include: "rules/download.smk"
 include: "rules/preproc_t1.smk"


2 changes: 1 addition & 1 deletion hippunfold/workflow/rules/common.smk
@@ -405,7 +405,7 @@ def get_download_dir():
         dirs = AppDirs("hippunfold", "khanlab")
         download_dir = dirs.user_cache_dir
     return download_dir


 rule archive_work_after_final:
     input:
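Only the AppDirs fallback is visible in this hunk; the HIPPUNFOLD_CACHE_DIR branch mentioned in the commit message sits above it. A minimal sketch of the presumed overall behavior (the env-var check is an assumption, not shown in the diff):

    import os
    from appdirs import AppDirs

    def get_download_dir():
        # assumed: an explicit env-var override takes precedence
        if "HIPPUNFOLD_CACHE_DIR" in os.environ:
            return os.environ["HIPPUNFOLD_CACHE_DIR"]
        # fallback shown in the hunk: e.g. ~/.cache/hippunfold on Linux
        dirs = AppDirs("hippunfold", "khanlab")
        return dirs.user_cache_dir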
49 changes: 49 additions & 0 deletions hippunfold/workflow/rules/download.smk
@@ -0,0 +1,49 @@
+# populate the HIPPUNFOLD_CACHE_DIR folder as needed
+
+download_dir = get_download_dir()
+
+
+def get_model_tar():
+
+    if config["force_nnunet_model"]:
+        model_name = config["force_nnunet_model"]
+    else:
+        model_name = config["modality"]
+
+    local_tar = config["resource_urls"]["nnunet_model"].get(model_name, None)
+    if local_tar == None:
+        print(f"ERROR: {model_name} does not exist in nnunet_model in the config file")
+
+    return (Path(download_dir) / "model" / Path(local_tar).name).absolute()
+
+
+rule download_nnunet_model:
+    params:
+        url=config["resource_urls"]["nnunet_model"][config["force_nnunet_model"]]
+        if config["force_nnunet_model"]
+        else config["resource_urls"]["nnunet_model"][config["modality"]],
+        model_dir=Path(download_dir) / "model",
+    output:
+        model_tar=get_model_tar(),
+    container:
+        config["singularity"]["autotop"]
+    shell:
+        "mkdir -p {params.model_dir} && wget https://{params.url} -O {output}"
+
+
+rule download_extract_atlas_or_template:
+    params:
+        url=lambda wildcards: config["resource_urls"][wildcards.resource_type][
+            wildcards.atlas
+        ],
+    output:
+        unzip_dir=directory(
+            Path(download_dir) / "{resource_type,atlas|template}" / "{atlas}"
+        ),
+    container:
+        config["singularity"]["autotop"]
+    shadow:
+        "minimal"
+    shell:
+        "wget https://{params.url} -O temp.zip && "
+        " unzip -d {output.unzip_dir} temp.zip"
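Two details worth noting here. First, get_model_tar() derives the cached path from the URL basename, so the rule's output location is known before anything is downloaded. Second, {resource_type,atlas|template} is an inline wildcard constraint, limiting this one rule to the atlas/ and template/ cache subdirectories. A sketch of the model-path resolution, using a hypothetical resource_urls entry:

    from pathlib import Path

    download_dir = "/home/user/.cache/hippunfold"  # from get_download_dir()

    # hypothetical value under config["resource_urls"]["nnunet_model"]["T2w"]
    local_tar = "zenodo.org/record/XXXXXXX/files/trained_model.Task101_hcp1200_T2w.tar"

    model_tar = (Path(download_dir) / "model" / Path(local_tar).name).absolute()
    # -> /home/user/.cache/hippunfold/model/trained_model.Task101_hcp1200_T2w.tar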
51 changes: 20 additions & 31 deletions hippunfold/workflow/rules/gifti.smk
@@ -462,13 +462,11 @@ rule metric_to_nii:
             label="hipp",
             **config["subj_wildcards"]
         ),
+        atlas_dir=Path(download_dir) / "atlas" / "multihist7",
     params:
         interp="-nearest-vertex 1",
-        refflatnii=os.path.join(
-            workflow.basedir,
-            "..",
-            config["atlas_files"]["multihist7"]["label_nii"],
-        ),
+        refflatnii=lambda wildcards, input: Path(input.atlas_dir)
+        / config["atlas_files"]["multihist7"]["label_nii"],
     output:
         metric_nii=bids(
             root=work,
@@ -526,26 +524,18 @@ rule unfolded_registration:
             label="hipp",
             **config["subj_wildcards"]
         ),
+        atlas_dir=lambda wildcards: Path(download_dir) / "atlas" / wildcards.atlas,
     params:
         antsparams="-d 2 -t so",
         outsuffix="tmp",
         warpfn="tmp1Warp.nii.gz",
         invwarpfn="tmp1InverseWarp.nii.gz",
-        refthickness=lambda wildcards: os.path.join(
-            workflow.basedir,
-            "..",
-            config["atlas_files"][wildcards.atlas]["thick"],
-        ),
-        refcurvature=lambda wildcards: os.path.join(
-            workflow.basedir,
-            "..",
-            config["atlas_files"][wildcards.atlas]["curv"],
-        ),
-        refgyrification=lambda wildcards: os.path.join(
-            workflow.basedir,
-            "..",
-            config["atlas_files"][wildcards.atlas]["gyr"],
-        ),
+        refthickness=lambda wildcards, input: Path(input.atlas_dir)
+        / config["atlas_files"][wildcards.atlas]["thickness"],
+        refcurvature=lambda wildcards, input: Path(input.atlas_dir)
+        / config["atlas_files"][wildcards.atlas]["curvature"],
+        refgyrification=lambda wildcards, input: Path(input.atlas_dir)
+        / config["atlas_files"][wildcards.atlas]["gyrification"],
     output:
         warp=bids(
             root=work,
@@ -1057,9 +1047,6 @@ rule calculate_thickness_from_surface2:
 rule resample_atlas_to_refvol:
     """this is just done in case the atlas has a different unfolded config than the current run"""
     input:
-        atlas=lambda wildcards: os.path.join(
-            workflow.basedir, "..", config["atlas_files"][wildcards.atlas]["label_nii"]
-        ),
         refvol=bids(
             root=root,
             space="unfold",
@@ -1068,6 +1055,10 @@ rule resample_atlas_to_refvol:
             suffix="refvol.nii.gz",
             **config["subj_wildcards"]
         ),
+        atlas_dir=lambda wildcards: Path(download_dir) / "atlas" / wildcards.atlas,
+    params:
+        atlas=lambda wildcards, input: Path(input.atlas_dir)
+        / config["atlas_files"][wildcards.atlas]["label_nii"],
     output:
         label_nii=bids(
             root=work,
@@ -1094,7 +1085,7 @@ rule resample_atlas_to_refvol:
     group:
         "subj"
     shell:
-        "antsApplyTransforms -d 3 -n MultiLabel -i {input.atlas} -r {input.refvol} -o {output.label_nii} -v &> {log}"
+        "antsApplyTransforms -d 3 -n MultiLabel -i {params.atlas} -r {input.refvol} -o {output.label_nii} -v &> {log}"


 rule nii_to_label_gii:
@@ -1116,12 +1107,10 @@ rule nii_to_label_gii:
             "unfold_template_hipp",
             "tpl-avg_space-unfold_den-{density}_midthickness.surf.gii",
         ),
-        label_list=lambda wildcards: os.path.join(
-            workflow.basedir,
-            "..",
-            config["atlas_files"][wildcards.atlas]["label_list"],
-        ),
+        atlas_dir=lambda wildcards: Path(download_dir) / "atlas" / wildcards.atlas,
     params:
+        label_list=lambda wildcards, input: Path(input.atlas_dir)
+        / config["atlas_files"][wildcards.atlas]["label_list"],
         structure_type=lambda wildcards: hemi_to_structure[wildcards.hemi],
     output:
         label_gii=bids(
@@ -1142,8 +1131,8 @@ rule nii_to_label_gii:
     shadow:
         "minimal"
     shell:
         "wb_command -volume-to-surface-mapping {input.label_nii} {input.surf} temp.shape.gii -enclosing && "
-        "wb_command -metric-label-import temp.shape.gii {input.label_list} {output.label_gii} && "
+        "wb_command -metric-label-import temp.shape.gii {params.label_list} {output.label_gii} && "
         "wb_command -set-structure {output.label_gii} {params.structure_type}"


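The recurring pattern in this file: the downloaded atlas directory is declared as an input, which makes Snakemake run download_extract_atlas_or_template on demand, and a params lambda joins the config-relative filename onto it (params functions receive the resolved input object as a second argument). A stripped-down illustration of the pattern, with a hypothetical rule name and output, assuming download_dir and config from the workflow context:

    rule use_atlas_file:
        input:
            # depending on the directory triggers the download rule first
            atlas_dir=lambda wildcards: Path(download_dir) / "atlas" / wildcards.atlas,
        params:
            # the full file path is assembled only after inputs are resolved
            label_nii=lambda wildcards, input: Path(input.atlas_dir)
            / config["atlas_files"][wildcards.atlas]["label_nii"],
        output:
            "work/{atlas}_atlas_check.txt",
        shell:
            "test -e {params.label_nii} && touch {output}"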
60 changes: 13 additions & 47 deletions hippunfold/workflow/rules/nnunet.smk
@@ -41,22 +41,6 @@ def get_nnunet_input(wildcards):
     return nii


-def get_model_tar():
-
-    download_dir = get_download_dir()
-
-    if config["force_nnunet_model"]:
-        model_name = config["force_nnunet_model"]
-    else:
-        model_name = config["modality"]
-
-    local_tar = config["resource_urls"]["nnunet_model"].get(model_name, None)
-    if local_tar == None:
-        print(f"ERROR: {model_name} does not exist in nnunet_model in the config file")
-
-    return os.path.abspath(os.path.join(download_dir, local_tar.split("/")[-1]))
-
-
 def parse_task_from_tar(wildcards, input):
     match = re.search("Task[0-9]{3}_[\w]+", input.model_tar)
     if match:
@@ -84,19 +68,6 @@ def parse_trainer_from_tar(wildcards, input):
     return trainer


-rule download_model:
-    params:
-        url=config["resource_urls"]["nnunet_model"][config["force_nnunet_model"]]
-        if config["force_nnunet_model"]
-        else config["resource_urls"]["nnunet_model"][config["modality"]],
-    output:
-        model_tar=get_model_tar(),
-    container:
-        config["singularity"]["autotop"]
-    shell:
-        "wget https://{params.url} -O {output}"
-
-
 rule run_inference:
     """ This rule uses either GPU or CPU .
     It also runs in an isolated folder (shadow), with symlinks to inputs in that folder, copying over outputs once complete, so temp files are not retained"""
@@ -201,22 +172,17 @@ rule unflip_nnunet_nii:
         " {input.unflip_ref} -push FLIPPED -copy-transform -o {output.nnunet_seg} "


-def get_f3d_ref(wildcards):
+def get_f3d_ref(wildcards, input):
+
     if config["modality"] == "T2w":
         nii = (
-            os.path.join(
-                workflow.basedir,
-                "..",
-                config["template_files"][config["template"]]["crop_ref"],
-            ),
+            Path(input.template_dir)
+            / config["template_files"][config["template"]]["crop_ref"]
         )
     elif config["modality"] == "T1w":
         nii = (
-            os.path.join(
-                workflow.basedir,
-                "..",
-                config["template_files"][config["template"]]["crop_refT1w"],
-            ),
+            Path(input.template_dir)
+            / config["template_files"][config["template"]]["crop_refT1w"]
         )
     else:
         raise ValueError("modality not supported for nnunet!")
@@ -245,6 +211,8 @@ rule qc_nnunet_f3d:
             space="corobl",
             hemi="{hemi}"
         ),
-        ref=get_f3d_ref,
+        template_dir=Path(download_dir) / "template" / config["template"],
+    params:
+        ref=get_f3d_ref,
     output:
         cpp=bids(
@@ -288,8 +256,8 @@ rule qc_nnunet_f3d:
     group:
         "subj"
     shell:
-        "reg_f3d -flo {input.img} -ref {input.ref} -res {output.res} -cpp {output.cpp} &> {log} && "
-        "reg_resample -flo {input.seg} -cpp {output.cpp} -ref {input.ref} -res {output.res_mask} -inter 0 &> {log}"
+        "reg_f3d -flo {input.img} -ref {params.ref} -res {output.res} -cpp {output.cpp} &> {log} && "
+        "reg_resample -flo {input.seg} -cpp {output.cpp} -ref {params.ref} -res {output.res_mask} -inter 0 &> {log}"


 rule qc_nnunet_dice:
@@ -303,13 +271,11 @@ rule qc_nnunet_dice:
             space="template",
             hemi="{hemi}"
         ),
-        ref=os.path.join(
-            workflow.basedir,
-            "..",
-            config["template_files"][config["template"]]["Mask_crop"],
-        ),
+        template_dir=Path(download_dir) / "template" / config["template"],
     params:
         hipp_lbls=[1, 2, 7, 8],
+        ref=lambda wildcards, input: Path(input.template_dir)
+        / config["template_files"][config["template"]]["Mask_crop"],
     output:
         dice=report(
             bids(
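Note why get_f3d_ref changed signature: it moved from input: to params:. Snakemake calls input functions with only the wildcards object, while params functions may also accept the resolved input, which is what lets the new version build paths from input.template_dir. Schematically (filenames illustrative, bodies abbreviated):

    # old: used as an input function -- only wildcards are available,
    # so the path had to be known relative to the repository
    def get_f3d_ref_old(wildcards):
        return "resources/CITI168/T2w_300umCoronalOblique_hemi-L.nii.gz"

    # new: used as a params function -- the resolved inputs are passed in,
    # so the path can hang off the downloaded template_dir input
    def get_f3d_ref_new(wildcards, input):
        return Path(input.template_dir) / "T2w_300umCoronalOblique_hemi-L.nii.gz"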
11 changes: 5 additions & 6 deletions hippunfold/workflow/rules/preproc_seg.smk
@@ -39,11 +39,10 @@ rule warp_seg_to_corobl_crop:
             desc="affine",
             type_="itk"
         ),
-        ref=os.path.join(
-            workflow.basedir,
-            "..",
-            config["template_files"][config["template"]]["crop_ref"],
-        ),
+        template_dir=Path(download_dir) / "template" / config["template"],
+    params:
+        ref=lambda wildcards, input: Path(input.template_dir)
+        / config["template_files"][config["template"]]["crop_ref"],
     output:
         nii=bids(
             root=work,
@@ -60,7 +59,7 @@ rule warp_seg_to_corobl_crop:
         "subj"
     shell:
         "ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={threads} "
-        "antsApplyTransforms -d 3 --interpolation MultiLabel -i {input.nii} -o {output.nii} -r {input.ref} -t {input.xfm}"
+        "antsApplyTransforms -d 3 --interpolation MultiLabel -i {input.nii} -o {output.nii} -r {params.ref} -t {input.xfm}"


 rule lr_flip_seg:
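The same move from input.ref to params.ref as in the other files, and the reason generalizes: everything under input: must either already exist or be producible by some rule, and the download rules only produce directories. Listing an individual file inside one would fail DAG construction on a fresh cache. A minimal contrast, with hypothetical rule and file names:

    # fragile: on a fresh cache no rule declares this exact file as an output,
    # so Snakemake would raise a MissingInputException at DAG-build time
    # input:
    #     ref=Path(download_dir) / "template" / "CITI168" / "T2w_head_700um.nii.gz",

    rule uses_template_file:
        input:
            # the directory *is* an output of download_extract_atlas_or_template
            template_dir=Path(download_dir) / "template" / "CITI168",
        params:
            ref=lambda wildcards, input: Path(input.template_dir)
            / "T2w_head_700um.nii.gz",
        output:
            "work/template_check.txt",
        shell:
            "test -e {params.ref} && touch {output}"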