From d97f843611649196e63b35d600f756ecde7c16bb Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Fri, 2 Dec 2016 14:39:07 -0800 Subject: [PATCH 01/14] shorter task label name --- run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run.py b/run.py index c5f396c..2f312ac 100644 --- a/run.py +++ b/run.py @@ -343,7 +343,7 @@ def run_diffusion_processsing(**args): type='bold', extensions=["nii.gz", "nii"])] for fmritcs in bolds: - fmriname = "_".join(fmritcs.split("sub-")[-1].split("_")[1:]).split(".")[0] + fmriname = "_".join(fmritcs.split("sub-")[-1].split("_")[1:-1]).split(".")[0] assert fmriname fmriscout = fmritcs.replace("_bold", "_sbref") From ce1f2426aa34d0b6bbb0738b2bee4ee5a31f683e Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Fri, 2 Dec 2016 15:04:57 -0800 Subject: [PATCH 02/14] dwi processing (multiple directions, runs) --- run.py | 446 ++++++++++++++++++++++++++++++++------------------------- 1 file changed, 252 insertions(+), 194 deletions(-) diff --git a/run.py b/run.py index 2f312ac..12e07a1 100644 --- a/run.py +++ b/run.py @@ -12,6 +12,7 @@ from functools import partial from collections import OrderedDict + def run(command, env={}, cwd=None): merged_env = os.environ merged_env.update(env) @@ -25,59 +26,62 @@ def run(command, env={}, cwd=None): if line == '' and process.poll() != None: break if process.returncode != 0: - raise Exception("Non zero return code: %d"%process.returncode) + raise Exception("Non zero return code: %d" % process.returncode) + -grayordinatesres = "2" # This is currently the only option for which the is an atlas +grayordinatesres = "2" # This is currently the only option for which the is an atlas lowresmesh = 32 + def run_pre_freesurfer(**args): args.update(os.environ) args["t1"] = "@".join(t1ws) args["t2"] = "@".join(t2ws) cmd = '{HCPPIPEDIR}/PreFreeSurfer/PreFreeSurferPipeline.sh ' + \ - '--path="{path}" ' + \ - '--subject="{subject}" ' + \ - '--t1="{t1}" ' + \ - '--t2="{t2}" ' + \ - '--t1template="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm.nii.gz" ' + \ - '--t1templatebrain="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm_brain.nii.gz" ' + \ - '--t1template2mm="{HCPPIPEDIR_Templates}/MNI152_T1_2mm.nii.gz" ' + \ - '--t2template="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res:.1f}mm.nii.gz" ' + \ - '--t2templatebrain="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res:.1f}mm_brain.nii.gz" ' + \ - '--t2template2mm="{HCPPIPEDIR_Templates}/MNI152_T2_2mm.nii.gz" ' + \ - '--templatemask="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm_brain_mask.nii.gz" ' + \ - '--template2mmmask="{HCPPIPEDIR_Templates}/MNI152_T1_2mm_brain_mask_dil.nii.gz" ' + \ - '--brainsize="150" ' + \ - '--fnirtconfig="{HCPPIPEDIR_Config}/T1_2_MNI152_2mm.cnf" ' + \ - '--fmapmag="{fmapmag}" ' + \ - '--fmapphase="{fmapphase}" ' + \ - '--fmapgeneralelectric="NONE" ' + \ - '--echodiff="{echodiff}" ' + \ - '--SEPhaseNeg="{SEPhaseNeg}" ' + \ - '--SEPhasePos="{SEPhasePos}" ' + \ - '--echospacing="{echospacing}" ' + \ - '--seunwarpdir="{seunwarpdir}" ' + \ - '--t1samplespacing="{t1samplespacing}" ' + \ - '--t2samplespacing="{t2samplespacing}" ' + \ - '--unwarpdir="{unwarpdir}" ' + \ - '--gdcoeffs="NONE" ' + \ - '--avgrdcmethod={avgrdcmethod} ' + \ - '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" ' + \ - '--printcom=""' + '--path="{path}" ' + \ + '--subject="{subject}" ' + \ + '--t1="{t1}" ' + \ + '--t2="{t2}" ' + \ + '--t1template="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm.nii.gz" ' + \ + 
'--t1templatebrain="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm_brain.nii.gz" ' + \ + '--t1template2mm="{HCPPIPEDIR_Templates}/MNI152_T1_2mm.nii.gz" ' + \ + '--t2template="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res:.1f}mm.nii.gz" ' + \ + '--t2templatebrain="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res:.1f}mm_brain.nii.gz" ' + \ + '--t2template2mm="{HCPPIPEDIR_Templates}/MNI152_T2_2mm.nii.gz" ' + \ + '--templatemask="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm_brain_mask.nii.gz" ' + \ + '--template2mmmask="{HCPPIPEDIR_Templates}/MNI152_T1_2mm_brain_mask_dil.nii.gz" ' + \ + '--brainsize="150" ' + \ + '--fnirtconfig="{HCPPIPEDIR_Config}/T1_2_MNI152_2mm.cnf" ' + \ + '--fmapmag="{fmapmag}" ' + \ + '--fmapphase="{fmapphase}" ' + \ + '--fmapgeneralelectric="NONE" ' + \ + '--echodiff="{echodiff}" ' + \ + '--SEPhaseNeg="{SEPhaseNeg}" ' + \ + '--SEPhasePos="{SEPhasePos}" ' + \ + '--echospacing="{echospacing}" ' + \ + '--seunwarpdir="{seunwarpdir}" ' + \ + '--t1samplespacing="{t1samplespacing}" ' + \ + '--t2samplespacing="{t2samplespacing}" ' + \ + '--unwarpdir="{unwarpdir}" ' + \ + '--gdcoeffs="NONE" ' + \ + '--avgrdcmethod={avgrdcmethod} ' + \ + '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" ' + \ + '--printcom=""' cmd = cmd.format(**args) run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + def run_freesurfer(**args): args.update(os.environ) args["subjectDIR"] = os.path.join(args["path"], args["subject"], "T1w") cmd = '{HCPPIPEDIR}/FreeSurfer/FreeSurferPipeline.sh ' + \ - '--subject="{subject}" ' + \ - '--subjectDIR="{subjectDIR}" ' + \ - '--t1="{path}/{subject}/T1w/T1w_acpc_dc_restore.nii.gz" ' + \ - '--t1brain="{path}/{subject}/T1w/T1w_acpc_dc_restore_brain.nii.gz" ' + \ - '--t2="{path}/{subject}/T1w/T2w_acpc_dc_restore.nii.gz" ' + \ - '--printcom=""' + '--subject="{subject}" ' + \ + '--subjectDIR="{subjectDIR}" ' + \ + '--t1="{path}/{subject}/T1w/T1w_acpc_dc_restore.nii.gz" ' + \ + '--t1brain="{path}/{subject}/T1w/T1w_acpc_dc_restore_brain.nii.gz" ' + \ + '--t2="{path}/{subject}/T1w/T2w_acpc_dc_restore.nii.gz" ' + \ + '--printcom=""' cmd = cmd.format(**args) if not os.path.exists(os.path.join(args["subjectDIR"], "fsaverage")): @@ -93,108 +97,117 @@ def run_freesurfer(**args): run(cmd, cwd=args["path"], env={"NSLOTS": str(args["n_cpus"]), "OMP_NUM_THREADS": str(args["n_cpus"])}) + def run_post_freesurfer(**args): args.update(os.environ) cmd = '{HCPPIPEDIR}/PostFreeSurfer/PostFreeSurferPipeline.sh ' + \ - '--path="{path}" ' + \ - '--subject="{subject}" ' + \ - '--surfatlasdir="{HCPPIPEDIR_Templates}/standard_mesh_atlases" ' + \ - '--grayordinatesdir="{HCPPIPEDIR_Templates}/91282_Greyordinates" ' + \ - '--grayordinatesres="{grayordinatesres:s}" ' + \ - '--hiresmesh="164" ' + \ - '--lowresmesh="{lowresmesh:d}" ' + \ - '--subcortgraylabels="{HCPPIPEDIR_Config}/FreeSurferSubcorticalLabelTableLut.txt" ' + \ - '--freesurferlabels="{HCPPIPEDIR_Config}/FreeSurferAllLut.txt" ' + \ - '--refmyelinmaps="{HCPPIPEDIR_Templates}/standard_mesh_atlases/Conte69.MyelinMap_BC.164k_fs_LR.dscalar.nii" ' + \ - '--regname="FS" ' + \ - '--printcom=""' + '--path="{path}" ' + \ + '--subject="{subject}" ' + \ + '--surfatlasdir="{HCPPIPEDIR_Templates}/standard_mesh_atlases" ' + \ + '--grayordinatesdir="{HCPPIPEDIR_Templates}/91282_Greyordinates" ' + \ + '--grayordinatesres="{grayordinatesres:s}" ' + \ + '--hiresmesh="164" ' + \ + '--lowresmesh="{lowresmesh:d}" ' + \ + '--subcortgraylabels="{HCPPIPEDIR_Config}/FreeSurferSubcorticalLabelTableLut.txt" ' + \ + 
'--freesurferlabels="{HCPPIPEDIR_Config}/FreeSurferAllLut.txt" ' + \ + '--refmyelinmaps="{HCPPIPEDIR_Templates}/standard_mesh_atlases/Conte69.MyelinMap_BC.164k_fs_LR.dscalar.nii" ' + \ + '--regname="FS" ' + \ + '--printcom=""' cmd = cmd.format(**args) run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + def run_generic_fMRI_volume_processsing(**args): args.update(os.environ) cmd = '{HCPPIPEDIR}/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh ' + \ - '--path={path} ' + \ - '--subject={subject} ' + \ - '--fmriname={fmriname} ' + \ - '--fmritcs={fmritcs} ' + \ - '--fmriscout={fmriscout} ' + \ - '--SEPhaseNeg={SEPhaseNeg} ' + \ - '--SEPhasePos={SEPhasePos} ' + \ - '--fmapmag="NONE" ' + \ - '--fmapphase="NONE" ' + \ - '--fmapgeneralelectric="NONE" ' + \ - '--echospacing={echospacing} ' + \ - '--echodiff="NONE" ' + \ - '--unwarpdir={unwarpdir} ' + \ - '--fmrires={fmrires:s} ' + \ - '--dcmethod={dcmethod} ' + \ - '--gdcoeffs="NONE" ' + \ - '--topupconfig={HCPPIPEDIR_Config}/b02b0.cnf ' + \ - '--printcom="" ' + \ - '--biascorrection={biascorrection} ' + \ - '--mctype="MCFLIRT"' + '--path={path} ' + \ + '--subject={subject} ' + \ + '--fmriname={fmriname} ' + \ + '--fmritcs={fmritcs} ' + \ + '--fmriscout={fmriscout} ' + \ + '--SEPhaseNeg={SEPhaseNeg} ' + \ + '--SEPhasePos={SEPhasePos} ' + \ + '--fmapmag="NONE" ' + \ + '--fmapphase="NONE" ' + \ + '--fmapgeneralelectric="NONE" ' + \ + '--echospacing={echospacing} ' + \ + '--echodiff="NONE" ' + \ + '--unwarpdir={unwarpdir} ' + \ + '--fmrires={fmrires:s} ' + \ + '--dcmethod={dcmethod} ' + \ + '--gdcoeffs="NONE" ' + \ + '--topupconfig={HCPPIPEDIR_Config}/b02b0.cnf ' + \ + '--printcom="" ' + \ + '--biascorrection={biascorrection} ' + \ + '--mctype="MCFLIRT"' cmd = cmd.format(**args) run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + def run_generic_fMRI_surface_processsing(**args): print(args) args.update(os.environ) cmd = '{HCPPIPEDIR}/fMRISurface/GenericfMRISurfaceProcessingPipeline.sh ' + \ - '--path={path} ' + \ - '--subject={subject} ' + \ - '--fmriname={fmriname} ' + \ - '--lowresmesh="{lowresmesh:d}" ' + \ - '--fmrires={fmrires:s} ' + \ - '--smoothingFWHM={fmrires:s} ' + \ - '--grayordinatesres="{grayordinatesres:s}" ' + \ - '--regname="FS"' + '--path={path} ' + \ + '--subject={subject} ' + \ + '--fmriname={fmriname} ' + \ + '--lowresmesh="{lowresmesh:d}" ' + \ + '--fmrires={fmrires:s} ' + \ + '--smoothingFWHM={fmrires:s} ' + \ + '--grayordinatesres="{grayordinatesres:s}" ' + \ + '--regname="FS"' cmd = cmd.format(**args) run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + def run_diffusion_processsing(**args): + # print(args) args.update(os.environ) cmd = '{HCPPIPEDIR}/DiffusionPreprocessing/DiffPreprocPipeline.sh ' + \ - '--posData="{posData}" ' +\ - '--negData="{negData}" ' + \ - '--path="{path}" ' + \ - '--subject="{subject}" ' + \ - '--echospacing="{echospacing}" '+ \ - '--PEdir={PEdir} ' + \ - '--gdcoeffs="NONE" ' + \ - '--printcom=""' + '--posData="{posData}" ' + \ + '--negData="{negData}" ' + \ + '--path="{path}" ' + \ + '--subject="{subject}" ' + \ + '--echospacing="{echospacing}" ' + \ + '--PEdir={PEdir} ' + \ + '--gdcoeffs="NONE" ' + \ + '--dwiname="{dwiname}" ' + \ + '--printcom=""' cmd = cmd.format(**args) + print('\n', cmd, '\n') run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + __version__ = open('/version').read() parser = argparse.ArgumentParser(description='HCP Pipeliens BIDS App (T1w, T2w, fMRI)') parser.add_argument('bids_dir', 
help='The directory with the input dataset ' - 'formatted according to the BIDS standard.') + 'formatted according to the BIDS standard.') parser.add_argument('output_dir', help='The directory where the output files ' - 'should be stored. If you are running group level analysis ' - 'this folder should be prepopulated with the results of the' - 'participant level analysis.') + 'should be stored. If you are running group level analysis ' + 'this folder should be prepopulated with the results of the' + 'participant level analysis.') parser.add_argument('analysis_level', help='Level of the analysis that will be performed. ' - 'Multiple participant level analyses can be run independently ' - '(in parallel) using the same output_dir.', + 'Multiple participant level analyses can be run independently ' + '(in parallel) using the same output_dir.', choices=['participant']) parser.add_argument('--participant_label', help='The label of the participant that should be analyzed. The label ' - 'corresponds to sub- from the BIDS spec ' - '(so it does not include "sub-"). If this parameter is not ' - 'provided all subjects should be analyzed. Multiple ' - 'participants can be specified with a space separated list.', - nargs="+") + 'corresponds to sub- from the BIDS spec ' + '(so it does not include "sub-"). If this parameter is not ' + 'provided all subjects should be analyzed. Multiple ' + 'participants can be specified with a space separated list.', + nargs="+") parser.add_argument('--n_cpus', help='Number of CPUs/cores available to use.', - default=1, type=int) + default=1, type=int) parser.add_argument('--stages', help='Which stages to run. Space separated list.', - nargs="+", choices=['PreFreeSurfer', 'FreeSurfer', - 'PostFreeSurfer', 'fMRIVolume', - 'fMRISurface', 'DiffusionPreprocessing'], - default=['PreFreeSurfer', 'FreeSurfer', 'PostFreeSurfer', - 'fMRIVolume', 'fMRISurface', - 'DiffusionPreprocessing']) -parser.add_argument('--license_key', help='FreeSurfer license key - letters and numbers after "*" in the email you received after registration. To register (for free) visit https://surfer.nmr.mgh.harvard.edu/registration.html', + nargs="+", choices=['PreFreeSurfer', 'FreeSurfer', + 'PostFreeSurfer', 'fMRIVolume', + 'fMRISurface', 'DiffusionPreprocessing'], + default=['PreFreeSurfer', 'FreeSurfer', 'PostFreeSurfer', + 'fMRIVolume', 'fMRISurface', + 'DiffusionPreprocessing']) +parser.add_argument('--license_key', + help='FreeSurfer license key - letters and numbers after "*" in the email you received after registration. To register (for free) visit https://surfer.nmr.mgh.harvard.edu/registration.html', required=True) parser.add_argument('-v', '--version', action='version', version='HCP Pielines BIDS App version {}'.format(__version__)) @@ -225,16 +238,16 @@ def run_diffusion_processsing(**args): t2ws = [f.filename for f in layout.get(subject=subject_label, type='T2w', extensions=["nii.gz", "nii"])] - assert (len(t1ws) > 0), "No T1w files found for subject %s!"%subject_label - assert (len(t2ws) > 0), "No T2w files found for subject %s!"%subject_label + assert (len(t1ws) > 0), "No T1w files found for subject %s!" % subject_label + assert (len(t2ws) > 0), "No T2w files found for subject %s!" 
% subject_label available_resolutions = [0.7, 0.8, 1.0] t1_zooms = nibabel.load(t1ws[0]).get_header().get_zooms() t1_res = float(min(t1_zooms[:3])) - t1_template_res = min(available_resolutions, key=lambda x:abs(x-t1_res)) + t1_template_res = min(available_resolutions, key=lambda x: abs(x - t1_res)) t2_zooms = nibabel.load(t2ws[0]).get_header().get_zooms() t2_res = float(min(t2_zooms[:3])) - t2_template_res = min(available_resolutions, key=lambda x:abs(x-t2_res)) + t2_template_res = min(available_resolutions, key=lambda x: abs(x - t2_res)) fieldmap_set = layout.get_fieldmap(t1ws[0]) fmap_args = {"fmapmag": "NONE", @@ -254,30 +267,30 @@ def run_diffusion_processsing(**args): t2_spacing = layout.get_metadata(t2ws[0])["RealDwellTime"] unwarpdir = layout.get_metadata(t1ws[0])["PhaseEncodingDirection"] - unwarpdir = unwarpdir.replace("i","x").replace("j", "y").replace("k", "z") + unwarpdir = unwarpdir.replace("i", "x").replace("j", "y").replace("k", "z") if len(unwarpdir) == 2: unwarpdir = "-" + unwarpdir[0] - fmap_args.update({"t1samplespacing": "%.8f"%t1_spacing, - "t2samplespacing": "%.8f"%t2_spacing, + fmap_args.update({"t1samplespacing": "%.8f" % t1_spacing, + "t2samplespacing": "%.8f" % t2_spacing, "unwarpdir": unwarpdir}) if fieldmap_set["type"] == "phasediff": - merged_file = "%s/tmp/%s/magfile.nii.gz"%(args.output_dir, subject_label) - run("mkdir -p %s/tmp/%s/ && fslmerge -t %s %s %s"%(args.output_dir, - subject_label, - merged_file, - fieldmap_set["magnitude1"], - fieldmap_set["magnitude2"])) + merged_file = "%s/tmp/%s/magfile.nii.gz" % (args.output_dir, subject_label) + run("mkdir -p %s/tmp/%s/ && fslmerge -t %s %s %s" % (args.output_dir, + subject_label, + merged_file, + fieldmap_set["magnitude1"], + fieldmap_set["magnitude2"])) phasediff_metadata = layout.get_metadata(fieldmap_set["phasediff"]) te_diff = phasediff_metadata["EchoTime2"] - phasediff_metadata["EchoTime1"] # HCP expects TE in miliseconds - te_diff = te_diff*1000.0 + te_diff = te_diff * 1000.0 fmap_args.update({"fmapmag": merged_file, "fmapphase": fieldmap_set["phasediff"], - "echodiff": "%.6f"%te_diff, + "echodiff": "%.6f" % te_diff, "avgrdcmethod": "SiemensFieldMap"}) elif fieldmap_set["type"] == "epi": SEPhaseNeg = None @@ -290,9 +303,9 @@ def run_diffusion_processsing(**args): SEPhasePos = fieldmap seunwarpdir = layout.get_metadata(fieldmap_set["epi"][0])["PhaseEncodingDirection"] - seunwarpdir = seunwarpdir.replace("-", "").replace("i","x").replace("j", "y").replace("k", "z") + seunwarpdir = seunwarpdir.replace("-", "").replace("i", "x").replace("j", "y").replace("k", "z") - #TODO check consistency of echo spacing instead of assuming it's all the same + # TODO check consistency of echo spacing instead of assuming it's all the same if "EffectiveEchoSpacing" in layout.get_metadata(fieldmap_set["epi"][0]): echospacing = layout.get_metadata(fieldmap_set["epi"][0])["EffectiveEchoSpacing"] elif "TotalReadoutTime" in layout.get_metadata(fieldmap_set["epi"][0]): @@ -306,35 +319,36 @@ def run_diffusion_processsing(**args): phase_len = nibabel.load(fieldmap_set["epi"][0]).shape[{"x": 0, "y": 1}[seunwarpdir]] echospacing = TotalReadoutTime / float(phase_len - 1) else: - raise RuntimeError("EffectiveEchoSpacing or TotalReadoutTime defined for the fieldmap intended for T1w image. Please fix your BIDS dataset.") + raise RuntimeError( + "EffectiveEchoSpacing or TotalReadoutTime defined for the fieldmap intended for T1w image. 
Please fix your BIDS dataset.") fmap_args.update({"SEPhaseNeg": SEPhaseNeg, "SEPhasePos": SEPhasePos, - "echospacing": "%.6f"%echospacing, + "echospacing": "%.6f" % echospacing, "seunwarpdir": seunwarpdir, "avgrdcmethod": "TOPUP"}) - #TODO add support for GE fieldmaps + # TODO add support for GE fieldmaps struct_stages_dict = OrderedDict([("PreFreeSurfer", partial(run_pre_freesurfer, - path=args.output_dir, - subject="sub-%s"%subject_label, - t1ws=t1ws, - t2ws=t2ws, - n_cpus=args.n_cpus, - t1_template_res=t1_template_res, - t2_template_res=t2_template_res, - **fmap_args)), - ("FreeSurfer", partial(run_freesurfer, - path=args.output_dir, - subject="sub-%s"%subject_label, - n_cpus=args.n_cpus)), - ("PostFreeSurfer", partial(run_post_freesurfer, - path=args.output_dir, - subject="sub-%s"%subject_label, - grayordinatesres=grayordinatesres, - lowresmesh=lowresmesh, - n_cpus=args.n_cpus)) - ]) + path=args.output_dir, + subject="sub-%s" % subject_label, + t1ws=t1ws, + t2ws=t2ws, + n_cpus=args.n_cpus, + t1_template_res=t1_template_res, + t2_template_res=t2_template_res, + **fmap_args)), + ("FreeSurfer", partial(run_freesurfer, + path=args.output_dir, + subject="sub-%s" % subject_label, + n_cpus=args.n_cpus)), + ("PostFreeSurfer", partial(run_post_freesurfer, + path=args.output_dir, + subject="sub-%s" % subject_label, + grayordinatesres=grayordinatesres, + lowresmesh=lowresmesh, + n_cpus=args.n_cpus)) + ]) for stage, stage_func in struct_stages_dict.iteritems(): if stage in args.stages: stage_func() @@ -343,7 +357,7 @@ def run_diffusion_processsing(**args): type='bold', extensions=["nii.gz", "nii"])] for fmritcs in bolds: - fmriname = "_".join(fmritcs.split("sub-")[-1].split("_")[1:-1]).split(".")[0] + fmriname = fmritcs.split("task-")[1].split("_")[0] assert fmriname fmriscout = fmritcs.replace("_bold", "_sbref") @@ -363,7 +377,7 @@ def run_diffusion_processsing(**args): SEPhasePos = fieldmap echospacing = layout.get_metadata(fmritcs)["EffectiveEchoSpacing"] unwarpdir = layout.get_metadata(fmritcs)["PhaseEncodingDirection"] - unwarpdir = unwarpdir.replace("i","x").replace("j", "y").replace("k", "z") + unwarpdir = unwarpdir.replace("i", "x").replace("j", "y").replace("k", "z") if len(unwarpdir) == 2: unwarpdir = "-" + unwarpdir[0] dcmethod = "TOPUP" @@ -378,63 +392,107 @@ def run_diffusion_processsing(**args): zooms = nibabel.load(fmritcs).get_header().get_zooms() fmrires = float(min(zooms[:3])) - fmrires = "2" # https://github.com/Washington-University/Pipelines/blob/637b35f73697b77dcb1d529902fc55f431f03af7/fMRISurface/scripts/SubcorticalProcessing.sh#L43 + fmrires = "2" # https://github.com/Washington-University/Pipelines/blob/637b35f73697b77dcb1d529902fc55f431f03af7/fMRISurface/scripts/SubcorticalProcessing.sh#L43 # While running '/usr/bin/wb_command -cifti-create-dense-timeseries /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/Results/EMOTION/EMOTION_temp_subject.dtseries.nii -volume /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/Results/EMOTION/EMOTION.nii.gz /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/ROIs/ROIs.2.nii.gz': # ERROR: label volume has a different volume space than data volume func_stages_dict = OrderedDict([("fMRIVolume", partial(run_generic_fMRI_volume_processsing, - path=args.output_dir, - subject="sub-%s"%subject_label, - fmriname=fmriname, - fmritcs=fmritcs, - fmriscout=fmriscout, - SEPhaseNeg=SEPhaseNeg, - SEPhasePos=SEPhasePos, - echospacing=echospacing, - unwarpdir=unwarpdir, - fmrires=fmrires, - dcmethod=dcmethod, - 
biascorrection=biascorrection, - n_cpus=args.n_cpus)), - ("fMRISurface", partial(run_generic_fMRI_surface_processsing, - path=args.output_dir, - subject="sub-%s"%subject_label, - fmriname=fmriname, - fmrires=fmrires, - n_cpus=args.n_cpus, - grayordinatesres=grayordinatesres, - lowresmesh=lowresmesh)) - ]) + path=args.output_dir, + subject="sub-%s" % subject_label, + fmriname=fmriname, + fmritcs=fmritcs, + fmriscout=fmriscout, + SEPhaseNeg=SEPhaseNeg, + SEPhasePos=SEPhasePos, + echospacing=echospacing, + unwarpdir=unwarpdir, + fmrires=fmrires, + dcmethod=dcmethod, + biascorrection=biascorrection, + n_cpus=args.n_cpus)), + ("fMRISurface", partial(run_generic_fMRI_surface_processsing, + path=args.output_dir, + subject="sub-%s" % subject_label, + fmriname=fmriname, + fmrires=fmrires, + n_cpus=args.n_cpus, + grayordinatesres=grayordinatesres, + lowresmesh=lowresmesh)) + ]) for stage, stage_func in func_stages_dict.iteritems(): if stage in args.stages: stage_func() - dwis = layout.get(subject=subject_label, type='dwi', - extensions=["nii.gz", "nii"]) + # dwis = layout.get(subject=subject_label, type='dwi', + # extensions=["nii.gz", "nii"]) # print(dwis) - # acqs = set(layout.get(target='acquisition', return_type='id', - # subject=subject_label, type='dwi', - # extensions=["nii.gz", "nii"])) - # print(acqs) - # posData = [] - # negData = [] - # for acq in acqs: - # pos = "EMPTY" - # neg = "EMPTY" - # dwis = layout.get(subject=subject_label, - # type='dwi', acquisition=acq, - # extensions=["nii.gz", "nii"]) - # assert len(dwis) <= 2 - # for dwi in dwis: - # dwi = dwi.filename - # if "-" in layout.get_metadata(dwi)["PhaseEncodingDirection"]: - # neg = dwi - # else: - # pos = dwi - # posData.append(pos) - # negData.append(neg) - # - # print(negData) - # print(posData) + + posData = [] + negData = [] + PEdir = "None" + dwiname = "Diffusion" + dirnums = [] + + numruns = set(layout.get(target='run', return_type='id', + subject=subject_label, type='dwi', + extensions=["nii.gz", "nii"])) + + for session in numruns: + acqs = set(layout.get(target='acquisition', return_type='id', + subject=subject_label, type='dwi', + extensions=["nii.gz", "nii"])) + for acq in acqs: + y = [int(s) for s in acq[0:len(acq)] if s.isdigit()] + y = [str(s) for s in y] + y = ''.join(y) + dirnums.append(y) + for dirnum in set(dirnums): + dwiname = "Diffusion" + "_acq-dir" + dirnum + "_run-" + session + for acq in acqs: + if "AP" or "PA" in acqs: + PEdir = 2 + elif "LR" or "RL" in acqs: + PEdir = 1 + else: + RuntimeError("Acquisition direction not specified on dwi file") + pos = "EMPTY" + neg = "EMPTY" + gdcoeffs = "None" + dwis = layout.get(subject=subject_label, + type='dwi', acquisition=acq, run=session, + extensions=["nii.gz", "nii"]) + assert len(dwis) <= 2 + for dwi in dwis: + dwi = dwi.filename + if "-" in layout.get_metadata(dwi)["PhaseEncodingDirection"]: + neg = dwi + negData.append(neg) + else: + pos = dwi + posData.append(pos) + + # print(negData) + # print(posData) + # print(dwiname) + + for posfile, negfile in zip(posData, negData): + echospacing = layout.get_metadata(posfile)["EffectiveEchoSpacing"] * 1000 + dwi_stage_dict = OrderedDict([("DiffusionPreprocessing", partial(run_diffusion_processsing, + posData=posfile, + negData=negfile, + path=args.output_dir, + subject="sub-%s" % subject_label, + echospacing=echospacing, + PEdir=PEdir, + gdcoeffs="NONE", + dwiname=dwiname, + n_cpus=args.n_cpus))]) + + for stage, stage_func in dwi_stage_dict.iteritems(): + if stage in args.stages: + stage_func() + + + From 
92cde71e9379ab590007666c0bb9895e993d3d0b Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Fri, 2 Dec 2016 15:23:05 -0800 Subject: [PATCH 03/14] run label change --- run.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/run.py b/run.py index 12e07a1..b76118b 100644 --- a/run.py +++ b/run.py @@ -69,7 +69,7 @@ def run_pre_freesurfer(**args): '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" ' + \ '--printcom=""' cmd = cmd.format(**args) - run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_freesurfer(**args): @@ -94,8 +94,8 @@ def run_freesurfer(**args): shutil.copytree(os.path.join(os.environ["SUBJECTS_DIR"], "rh.EC_average"), os.path.join(args["subjectDIR"], "rh.EC_average")) - run(cmd, cwd=args["path"], env={"NSLOTS": str(args["n_cpus"]), - "OMP_NUM_THREADS": str(args["n_cpus"])}) + # run(cmd, cwd=args["path"], env={"NSLOTS": str(args["n_cpus"]), + # "OMP_NUM_THREADS": str(args["n_cpus"])}) def run_post_freesurfer(**args): @@ -114,7 +114,7 @@ def run_post_freesurfer(**args): '--regname="FS" ' + \ '--printcom=""' cmd = cmd.format(**args) - run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_generic_fMRI_volume_processsing(**args): @@ -141,7 +141,7 @@ def run_generic_fMRI_volume_processsing(**args): '--biascorrection={biascorrection} ' + \ '--mctype="MCFLIRT"' cmd = cmd.format(**args) - run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_generic_fMRI_surface_processsing(**args): @@ -157,7 +157,7 @@ def run_generic_fMRI_surface_processsing(**args): '--grayordinatesres="{grayordinatesres:s}" ' + \ '--regname="FS"' cmd = cmd.format(**args) - run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_diffusion_processsing(**args): @@ -449,7 +449,7 @@ def run_diffusion_processsing(**args): y = ''.join(y) dirnums.append(y) for dirnum in set(dirnums): - dwiname = "Diffusion" + "_acq-dir" + dirnum + "_run-" + session + dwiname = "Diffusion" + "_acq-dir" + dirnum + "_" + session for acq in acqs: if "AP" or "PA" in acqs: PEdir = 2 From d7823d685e214e9b5f311eb6c8ab8b72adab6b63 Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Mon, 5 Dec 2016 09:45:26 -0800 Subject: [PATCH 04/14] output directory name change --- run.py | 402 +++++++++++++++++++++++++++++++-------------------------- 1 file changed, 221 insertions(+), 181 deletions(-) diff --git a/run.py b/run.py index b76118b..e697c19 100644 --- a/run.py +++ b/run.py @@ -12,7 +12,6 @@ from functools import partial from collections import OrderedDict - def run(command, env={}, cwd=None): merged_env = os.environ merged_env.update(env) @@ -26,62 +25,59 @@ def run(command, env={}, cwd=None): if line == '' and process.poll() != None: break if process.returncode != 0: - raise Exception("Non zero return code: %d" % process.returncode) - + raise Exception("Non zero return code: %d"%process.returncode) -grayordinatesres = "2" # This is currently the only option for which the is an atlas +grayordinatesres = "2" # This is currently the only option for which the is an atlas lowresmesh = 32 - def run_pre_freesurfer(**args): args.update(os.environ) args["t1"] = "@".join(t1ws) args["t2"] = "@".join(t2ws) cmd = 
'{HCPPIPEDIR}/PreFreeSurfer/PreFreeSurferPipeline.sh ' + \ - '--path="{path}" ' + \ - '--subject="{subject}" ' + \ - '--t1="{t1}" ' + \ - '--t2="{t2}" ' + \ - '--t1template="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm.nii.gz" ' + \ - '--t1templatebrain="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm_brain.nii.gz" ' + \ - '--t1template2mm="{HCPPIPEDIR_Templates}/MNI152_T1_2mm.nii.gz" ' + \ - '--t2template="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res:.1f}mm.nii.gz" ' + \ - '--t2templatebrain="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res:.1f}mm_brain.nii.gz" ' + \ - '--t2template2mm="{HCPPIPEDIR_Templates}/MNI152_T2_2mm.nii.gz" ' + \ - '--templatemask="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm_brain_mask.nii.gz" ' + \ - '--template2mmmask="{HCPPIPEDIR_Templates}/MNI152_T1_2mm_brain_mask_dil.nii.gz" ' + \ - '--brainsize="150" ' + \ - '--fnirtconfig="{HCPPIPEDIR_Config}/T1_2_MNI152_2mm.cnf" ' + \ - '--fmapmag="{fmapmag}" ' + \ - '--fmapphase="{fmapphase}" ' + \ - '--fmapgeneralelectric="NONE" ' + \ - '--echodiff="{echodiff}" ' + \ - '--SEPhaseNeg="{SEPhaseNeg}" ' + \ - '--SEPhasePos="{SEPhasePos}" ' + \ - '--echospacing="{echospacing}" ' + \ - '--seunwarpdir="{seunwarpdir}" ' + \ - '--t1samplespacing="{t1samplespacing}" ' + \ - '--t2samplespacing="{t2samplespacing}" ' + \ - '--unwarpdir="{unwarpdir}" ' + \ - '--gdcoeffs="NONE" ' + \ - '--avgrdcmethod={avgrdcmethod} ' + \ - '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" ' + \ - '--printcom=""' + '--path="{path}" ' + \ + '--subject="{subject}" ' + \ + '--t1="{t1}" ' + \ + '--t2="{t2}" ' + \ + '--t1template="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm.nii.gz" ' + \ + '--t1templatebrain="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm_brain.nii.gz" ' + \ + '--t1template2mm="{HCPPIPEDIR_Templates}/MNI152_T1_2mm.nii.gz" ' + \ + '--t2template="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res:.1f}mm.nii.gz" ' + \ + '--t2templatebrain="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res:.1f}mm_brain.nii.gz" ' + \ + '--t2template2mm="{HCPPIPEDIR_Templates}/MNI152_T2_2mm.nii.gz" ' + \ + '--templatemask="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res:.1f}mm_brain_mask.nii.gz" ' + \ + '--template2mmmask="{HCPPIPEDIR_Templates}/MNI152_T1_2mm_brain_mask_dil.nii.gz" ' + \ + '--brainsize="150" ' + \ + '--fnirtconfig="{HCPPIPEDIR_Config}/T1_2_MNI152_2mm.cnf" ' + \ + '--fmapmag="{fmapmag}" ' + \ + '--fmapphase="{fmapphase}" ' + \ + '--fmapgeneralelectric="NONE" ' + \ + '--echodiff="{echodiff}" ' + \ + '--SEPhaseNeg="{SEPhaseNeg}" ' + \ + '--SEPhasePos="{SEPhasePos}" ' + \ + '--echospacing="{echospacing}" ' + \ + '--seunwarpdir="{seunwarpdir}" ' + \ + '--t1samplespacing="{t1samplespacing}" ' + \ + '--t2samplespacing="{t2samplespacing}" ' + \ + '--unwarpdir="{unwarpdir}" ' + \ + '--gdcoeffs="NONE" ' + \ + '--avgrdcmethod={avgrdcmethod} ' + \ + '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" ' + \ + '--printcom=""' cmd = cmd.format(**args) # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) - def run_freesurfer(**args): args.update(os.environ) args["subjectDIR"] = os.path.join(args["path"], args["subject"], "T1w") cmd = '{HCPPIPEDIR}/FreeSurfer/FreeSurferPipeline.sh ' + \ - '--subject="{subject}" ' + \ - '--subjectDIR="{subjectDIR}" ' + \ - '--t1="{path}/{subject}/T1w/T1w_acpc_dc_restore.nii.gz" ' + \ - '--t1brain="{path}/{subject}/T1w/T1w_acpc_dc_restore_brain.nii.gz" ' + \ - '--t2="{path}/{subject}/T1w/T2w_acpc_dc_restore.nii.gz" ' + \ - '--printcom=""' + 
'--subject="{subject}" ' + \ + '--subjectDIR="{subjectDIR}" ' + \ + '--t1="{path}/{subject}/T1w/T1w_acpc_dc_restore.nii.gz" ' + \ + '--t1brain="{path}/{subject}/T1w/T1w_acpc_dc_restore_brain.nii.gz" ' + \ + '--t2="{path}/{subject}/T1w/T2w_acpc_dc_restore.nii.gz" ' + \ + '--printcom=""' cmd = cmd.format(**args) if not os.path.exists(os.path.join(args["subjectDIR"], "fsaverage")): @@ -97,117 +93,162 @@ def run_freesurfer(**args): # run(cmd, cwd=args["path"], env={"NSLOTS": str(args["n_cpus"]), # "OMP_NUM_THREADS": str(args["n_cpus"])}) - def run_post_freesurfer(**args): args.update(os.environ) cmd = '{HCPPIPEDIR}/PostFreeSurfer/PostFreeSurferPipeline.sh ' + \ - '--path="{path}" ' + \ - '--subject="{subject}" ' + \ - '--surfatlasdir="{HCPPIPEDIR_Templates}/standard_mesh_atlases" ' + \ - '--grayordinatesdir="{HCPPIPEDIR_Templates}/91282_Greyordinates" ' + \ - '--grayordinatesres="{grayordinatesres:s}" ' + \ - '--hiresmesh="164" ' + \ - '--lowresmesh="{lowresmesh:d}" ' + \ - '--subcortgraylabels="{HCPPIPEDIR_Config}/FreeSurferSubcorticalLabelTableLut.txt" ' + \ - '--freesurferlabels="{HCPPIPEDIR_Config}/FreeSurferAllLut.txt" ' + \ - '--refmyelinmaps="{HCPPIPEDIR_Templates}/standard_mesh_atlases/Conte69.MyelinMap_BC.164k_fs_LR.dscalar.nii" ' + \ - '--regname="FS" ' + \ - '--printcom=""' + '--path="{path}" ' + \ + '--subject="{subject}" ' + \ + '--surfatlasdir="{HCPPIPEDIR_Templates}/standard_mesh_atlases" ' + \ + '--grayordinatesdir="{HCPPIPEDIR_Templates}/91282_Greyordinates" ' + \ + '--grayordinatesres="{grayordinatesres:s}" ' + \ + '--hiresmesh="164" ' + \ + '--lowresmesh="{lowresmesh:d}" ' + \ + '--subcortgraylabels="{HCPPIPEDIR_Config}/FreeSurferSubcorticalLabelTableLut.txt" ' + \ + '--freesurferlabels="{HCPPIPEDIR_Config}/FreeSurferAllLut.txt" ' + \ + '--refmyelinmaps="{HCPPIPEDIR_Templates}/standard_mesh_atlases/Conte69.MyelinMap_BC.164k_fs_LR.dscalar.nii" ' + \ + '--regname="FS" ' + \ + '--printcom=""' cmd = cmd.format(**args) # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) - def run_generic_fMRI_volume_processsing(**args): args.update(os.environ) cmd = '{HCPPIPEDIR}/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh ' + \ - '--path={path} ' + \ - '--subject={subject} ' + \ - '--fmriname={fmriname} ' + \ - '--fmritcs={fmritcs} ' + \ - '--fmriscout={fmriscout} ' + \ - '--SEPhaseNeg={SEPhaseNeg} ' + \ - '--SEPhasePos={SEPhasePos} ' + \ - '--fmapmag="NONE" ' + \ - '--fmapphase="NONE" ' + \ - '--fmapgeneralelectric="NONE" ' + \ - '--echospacing={echospacing} ' + \ - '--echodiff="NONE" ' + \ - '--unwarpdir={unwarpdir} ' + \ - '--fmrires={fmrires:s} ' + \ - '--dcmethod={dcmethod} ' + \ - '--gdcoeffs="NONE" ' + \ - '--topupconfig={HCPPIPEDIR_Config}/b02b0.cnf ' + \ - '--printcom="" ' + \ - '--biascorrection={biascorrection} ' + \ - '--mctype="MCFLIRT"' + '--path={path} ' + \ + '--subject={subject} ' + \ + '--fmriname={fmriname} ' + \ + '--fmritcs={fmritcs} ' + \ + '--fmriscout={fmriscout} ' + \ + '--SEPhaseNeg={SEPhaseNeg} ' + \ + '--SEPhasePos={SEPhasePos} ' + \ + '--fmapmag="NONE" ' + \ + '--fmapphase="NONE" ' + \ + '--fmapgeneralelectric="NONE" ' + \ + '--echospacing={echospacing} ' + \ + '--echodiff="NONE" ' + \ + '--unwarpdir={unwarpdir} ' + \ + '--fmrires={fmrires:s} ' + \ + '--dcmethod={dcmethod} ' + \ + '--gdcoeffs="NONE" ' + \ + '--topupconfig={HCPPIPEDIR_Config}/b02b0.cnf ' + \ + '--printcom="" ' + \ + '--biascorrection={biascorrection} ' + \ + '--mctype="MCFLIRT"' cmd = cmd.format(**args) # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": 
str(args["n_cpus"])}) - def run_generic_fMRI_surface_processsing(**args): print(args) args.update(os.environ) cmd = '{HCPPIPEDIR}/fMRISurface/GenericfMRISurfaceProcessingPipeline.sh ' + \ - '--path={path} ' + \ - '--subject={subject} ' + \ - '--fmriname={fmriname} ' + \ - '--lowresmesh="{lowresmesh:d}" ' + \ - '--fmrires={fmrires:s} ' + \ - '--smoothingFWHM={fmrires:s} ' + \ - '--grayordinatesres="{grayordinatesres:s}" ' + \ - '--regname="FS"' + '--path={path} ' + \ + '--subject={subject} ' + \ + '--fmriname={fmriname} ' + \ + '--lowresmesh="{lowresmesh:d}" ' + \ + '--fmrires={fmrires:s} ' + \ + '--smoothingFWHM={fmrires:s} ' + \ + '--grayordinatesres="{grayordinatesres:s}" ' + \ + '--regname="FS"' cmd = cmd.format(**args) # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) - def run_diffusion_processsing(**args): # print(args) args.update(os.environ) cmd = '{HCPPIPEDIR}/DiffusionPreprocessing/DiffPreprocPipeline.sh ' + \ - '--posData="{posData}" ' + \ - '--negData="{negData}" ' + \ - '--path="{path}" ' + \ - '--subject="{subject}" ' + \ - '--echospacing="{echospacing}" ' + \ - '--PEdir={PEdir} ' + \ - '--gdcoeffs="NONE" ' + \ - '--dwiname="{dwiname}" ' + \ - '--printcom=""' + '--posData="{posData}" ' +\ + '--negData="{negData}" ' + \ + '--path="{path}" ' + \ + '--subject="{subject}" ' + \ + '--echospacing="{echospacing}" '+ \ + '--PEdir={PEdir} ' + \ + '--gdcoeffs="NONE" ' + \ + '--dwiname="{dwiname}" ' + \ + '--printcom=""' + cmd = cmd.format(**args) + print('\n',cmd, '\n') + # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + +def generate_level1_fsf(**args): + print(args) + args.update(os.environ) + cmd = '{HCPPIPEDIR}/Examples/Scripts/generate_level_1_fsf_dev.sh ' + \ + '--studyfolder={studyfolder} ' + \ + '--subject={subject} ' + \ + '--taskname={taskname} ' + \ + '--templatedir={HCPPIPEDIR}/{templatedir} ' + \ + '--outdir={outdir} ' + \ + '--dir={dir}' + cmd = cmd.format(**args) + print('\n', cmd, '\n') + run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + +def generate_level2_fsf(**args): + print(args) + args.update(os.environ) + cmd = '{HCPPIPEDIR}/Examples/Scripts/generate_level_2_fsf_dev.sh ' + \ + '--studyfolder={studyfolder} ' + \ + '--subject={subject} ' + \ + '--taskname={taskname} ' + \ + '--templatedir={HCPPIPEDIR}/{templatedir} ' + \ + '--outdir={outdir} ' cmd = cmd.format(**args) print('\n', cmd, '\n') run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) +def run_task_fmri_analysis(**args): + print(args) + args.update(os.environ) + cmd = '{HCPPIPEDIR}/TaskfMRIAnalysis/TaskfMRIAnalysis.sh ' + \ + '--path={path} ' + \ + '--subject={subject} ' + \ + '--lvl1tasks={lvl1tasks} ' + \ + '--lvl1fsfs={lvl1fsfs} ' + \ + '--lvl2task={lvl2task} ' + \ + '--lvl2fsf={lvl2fsf} ' + \ + '--lowresmesh={lowresmesh} ' + \ + '--grayordinatesres="{grayordinatesres:s}" ' + \ + '--confound={confound} ' + \ + '--finalsmoothingFWHM={finalsmoothingFWHM} ' + \ + '--temporalfilter={temporalfilter} ' + \ + '--vba={vba} ' + \ + '--regname={regname} ' + \ + '--parcellation={parcellation} ' + \ + '--parcellationfile={parcellationfile} ' + \ + '--printcom=""' + cmd = cmd.format(**args) + print('\n', cmd, '\n') + run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) __version__ = open('/version').read() -parser = argparse.ArgumentParser(description='HCP Pipeliens BIDS App (T1w, T2w, fMRI)') +parser = argparse.ArgumentParser(description='HCP Pipelines BIDS App (T1w, T2w, fMRI)') 
parser.add_argument('bids_dir', help='The directory with the input dataset ' - 'formatted according to the BIDS standard.') + 'formatted according to the BIDS standard.') parser.add_argument('output_dir', help='The directory where the output files ' - 'should be stored. If you are running group level analysis ' - 'this folder should be prepopulated with the results of the' - 'participant level analysis.') + 'should be stored. If you are running group level analysis ' + 'this folder should be prepopulated with the results of the' + 'participant level analysis.') parser.add_argument('analysis_level', help='Level of the analysis that will be performed. ' - 'Multiple participant level analyses can be run independently ' - '(in parallel) using the same output_dir.', + 'Multiple participant level analyses can be run independently ' + '(in parallel) using the same output_dir.', choices=['participant']) parser.add_argument('--participant_label', help='The label of the participant that should be analyzed. The label ' - 'corresponds to sub- from the BIDS spec ' - '(so it does not include "sub-"). If this parameter is not ' - 'provided all subjects should be analyzed. Multiple ' - 'participants can be specified with a space separated list.', - nargs="+") + 'corresponds to sub- from the BIDS spec ' + '(so it does not include "sub-"). If this parameter is not ' + 'provided all subjects should be analyzed. Multiple ' + 'participants can be specified with a space separated list.', + nargs="+") parser.add_argument('--n_cpus', help='Number of CPUs/cores available to use.', - default=1, type=int) + default=1, type=int) parser.add_argument('--stages', help='Which stages to run. Space separated list.', - nargs="+", choices=['PreFreeSurfer', 'FreeSurfer', - 'PostFreeSurfer', 'fMRIVolume', - 'fMRISurface', 'DiffusionPreprocessing'], - default=['PreFreeSurfer', 'FreeSurfer', 'PostFreeSurfer', - 'fMRIVolume', 'fMRISurface', - 'DiffusionPreprocessing']) -parser.add_argument('--license_key', - help='FreeSurfer license key - letters and numbers after "*" in the email you received after registration. To register (for free) visit https://surfer.nmr.mgh.harvard.edu/registration.html', + nargs="+", choices=['PreFreeSurfer', 'FreeSurfer', + 'PostFreeSurfer', 'fMRIVolume', + 'fMRISurface', 'DiffusionPreprocessing'], + default=['PreFreeSurfer', 'FreeSurfer', 'PostFreeSurfer', + 'fMRIVolume', 'fMRISurface', + 'DiffusionPreprocessing', 'TaskfMRIAnalysis']) +parser.add_argument('--license_key', help='FreeSurfer license key - letters and numbers after "*" in the email you received after registration. To register (for free) visit https://surfer.nmr.mgh.harvard.edu/registration.html', required=True) parser.add_argument('-v', '--version', action='version', version='HCP Pielines BIDS App version {}'.format(__version__)) @@ -238,16 +279,16 @@ def run_diffusion_processsing(**args): t2ws = [f.filename for f in layout.get(subject=subject_label, type='T2w', extensions=["nii.gz", "nii"])] - assert (len(t1ws) > 0), "No T1w files found for subject %s!" % subject_label - assert (len(t2ws) > 0), "No T2w files found for subject %s!" 
% subject_label + assert (len(t1ws) > 0), "No T1w files found for subject %s!"%subject_label + assert (len(t2ws) > 0), "No T2w files found for subject %s!"%subject_label available_resolutions = [0.7, 0.8, 1.0] t1_zooms = nibabel.load(t1ws[0]).get_header().get_zooms() t1_res = float(min(t1_zooms[:3])) - t1_template_res = min(available_resolutions, key=lambda x: abs(x - t1_res)) + t1_template_res = min(available_resolutions, key=lambda x:abs(x-t1_res)) t2_zooms = nibabel.load(t2ws[0]).get_header().get_zooms() t2_res = float(min(t2_zooms[:3])) - t2_template_res = min(available_resolutions, key=lambda x: abs(x - t2_res)) + t2_template_res = min(available_resolutions, key=lambda x:abs(x-t2_res)) fieldmap_set = layout.get_fieldmap(t1ws[0]) fmap_args = {"fmapmag": "NONE", @@ -267,30 +308,30 @@ def run_diffusion_processsing(**args): t2_spacing = layout.get_metadata(t2ws[0])["RealDwellTime"] unwarpdir = layout.get_metadata(t1ws[0])["PhaseEncodingDirection"] - unwarpdir = unwarpdir.replace("i", "x").replace("j", "y").replace("k", "z") + unwarpdir = unwarpdir.replace("i","x").replace("j", "y").replace("k", "z") if len(unwarpdir) == 2: unwarpdir = "-" + unwarpdir[0] - fmap_args.update({"t1samplespacing": "%.8f" % t1_spacing, - "t2samplespacing": "%.8f" % t2_spacing, + fmap_args.update({"t1samplespacing": "%.8f"%t1_spacing, + "t2samplespacing": "%.8f"%t2_spacing, "unwarpdir": unwarpdir}) if fieldmap_set["type"] == "phasediff": - merged_file = "%s/tmp/%s/magfile.nii.gz" % (args.output_dir, subject_label) - run("mkdir -p %s/tmp/%s/ && fslmerge -t %s %s %s" % (args.output_dir, - subject_label, - merged_file, - fieldmap_set["magnitude1"], - fieldmap_set["magnitude2"])) + merged_file = "%s/tmp/%s/magfile.nii.gz"%(args.output_dir, subject_label) + run("mkdir -p %s/tmp/%s/ && fslmerge -t %s %s %s"%(args.output_dir, + subject_label, + merged_file, + fieldmap_set["magnitude1"], + fieldmap_set["magnitude2"])) phasediff_metadata = layout.get_metadata(fieldmap_set["phasediff"]) te_diff = phasediff_metadata["EchoTime2"] - phasediff_metadata["EchoTime1"] # HCP expects TE in miliseconds - te_diff = te_diff * 1000.0 + te_diff = te_diff*1000.0 fmap_args.update({"fmapmag": merged_file, "fmapphase": fieldmap_set["phasediff"], - "echodiff": "%.6f" % te_diff, + "echodiff": "%.6f"%te_diff, "avgrdcmethod": "SiemensFieldMap"}) elif fieldmap_set["type"] == "epi": SEPhaseNeg = None @@ -303,9 +344,9 @@ def run_diffusion_processsing(**args): SEPhasePos = fieldmap seunwarpdir = layout.get_metadata(fieldmap_set["epi"][0])["PhaseEncodingDirection"] - seunwarpdir = seunwarpdir.replace("-", "").replace("i", "x").replace("j", "y").replace("k", "z") + seunwarpdir = seunwarpdir.replace("-", "").replace("i","x").replace("j", "y").replace("k", "z") - # TODO check consistency of echo spacing instead of assuming it's all the same + #TODO check consistency of echo spacing instead of assuming it's all the same if "EffectiveEchoSpacing" in layout.get_metadata(fieldmap_set["epi"][0]): echospacing = layout.get_metadata(fieldmap_set["epi"][0])["EffectiveEchoSpacing"] elif "TotalReadoutTime" in layout.get_metadata(fieldmap_set["epi"][0]): @@ -319,36 +360,35 @@ def run_diffusion_processsing(**args): phase_len = nibabel.load(fieldmap_set["epi"][0]).shape[{"x": 0, "y": 1}[seunwarpdir]] echospacing = TotalReadoutTime / float(phase_len - 1) else: - raise RuntimeError( - "EffectiveEchoSpacing or TotalReadoutTime defined for the fieldmap intended for T1w image. 
Please fix your BIDS dataset.") + raise RuntimeError("EffectiveEchoSpacing or TotalReadoutTime defined for the fieldmap intended for T1w image. Please fix your BIDS dataset.") fmap_args.update({"SEPhaseNeg": SEPhaseNeg, "SEPhasePos": SEPhasePos, - "echospacing": "%.6f" % echospacing, + "echospacing": "%.6f"%echospacing, "seunwarpdir": seunwarpdir, "avgrdcmethod": "TOPUP"}) - # TODO add support for GE fieldmaps + #TODO add support for GE fieldmaps struct_stages_dict = OrderedDict([("PreFreeSurfer", partial(run_pre_freesurfer, - path=args.output_dir, - subject="sub-%s" % subject_label, - t1ws=t1ws, - t2ws=t2ws, - n_cpus=args.n_cpus, - t1_template_res=t1_template_res, - t2_template_res=t2_template_res, - **fmap_args)), - ("FreeSurfer", partial(run_freesurfer, - path=args.output_dir, - subject="sub-%s" % subject_label, - n_cpus=args.n_cpus)), - ("PostFreeSurfer", partial(run_post_freesurfer, - path=args.output_dir, - subject="sub-%s" % subject_label, - grayordinatesres=grayordinatesres, - lowresmesh=lowresmesh, - n_cpus=args.n_cpus)) - ]) + path=args.output_dir, + subject="sub-%s"%subject_label, + t1ws=t1ws, + t2ws=t2ws, + n_cpus=args.n_cpus, + t1_template_res=t1_template_res, + t2_template_res=t2_template_res, + **fmap_args)), + ("FreeSurfer", partial(run_freesurfer, + path=args.output_dir, + subject="sub-%s"%subject_label, + n_cpus=args.n_cpus)), + ("PostFreeSurfer", partial(run_post_freesurfer, + path=args.output_dir, + subject="sub-%s"%subject_label, + grayordinatesres=grayordinatesres, + lowresmesh=lowresmesh, + n_cpus=args.n_cpus)) + ]) for stage, stage_func in struct_stages_dict.iteritems(): if stage in args.stages: stage_func() @@ -357,7 +397,7 @@ def run_diffusion_processsing(**args): type='bold', extensions=["nii.gz", "nii"])] for fmritcs in bolds: - fmriname = fmritcs.split("task-")[1].split("_")[0] + fmriname = "_".join(bolds[0].split("sub-")[-1].split("_")[1:-1]).split(".")[0] assert fmriname fmriscout = fmritcs.replace("_bold", "_sbref") @@ -377,7 +417,7 @@ def run_diffusion_processsing(**args): SEPhasePos = fieldmap echospacing = layout.get_metadata(fmritcs)["EffectiveEchoSpacing"] unwarpdir = layout.get_metadata(fmritcs)["PhaseEncodingDirection"] - unwarpdir = unwarpdir.replace("i", "x").replace("j", "y").replace("k", "z") + unwarpdir = unwarpdir.replace("i","x").replace("j", "y").replace("k", "z") if len(unwarpdir) == 2: unwarpdir = "-" + unwarpdir[0] dcmethod = "TOPUP" @@ -392,34 +432,34 @@ def run_diffusion_processsing(**args): zooms = nibabel.load(fmritcs).get_header().get_zooms() fmrires = float(min(zooms[:3])) - fmrires = "2" # https://github.com/Washington-University/Pipelines/blob/637b35f73697b77dcb1d529902fc55f431f03af7/fMRISurface/scripts/SubcorticalProcessing.sh#L43 + fmrires = "2" # https://github.com/Washington-University/Pipelines/blob/637b35f73697b77dcb1d529902fc55f431f03af7/fMRISurface/scripts/SubcorticalProcessing.sh#L43 # While running '/usr/bin/wb_command -cifti-create-dense-timeseries /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/Results/EMOTION/EMOTION_temp_subject.dtseries.nii -volume /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/Results/EMOTION/EMOTION.nii.gz /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/ROIs/ROIs.2.nii.gz': # ERROR: label volume has a different volume space than data volume func_stages_dict = OrderedDict([("fMRIVolume", partial(run_generic_fMRI_volume_processsing, - path=args.output_dir, - subject="sub-%s" % subject_label, - fmriname=fmriname, - fmritcs=fmritcs, - fmriscout=fmriscout, - 
SEPhaseNeg=SEPhaseNeg, - SEPhasePos=SEPhasePos, - echospacing=echospacing, - unwarpdir=unwarpdir, - fmrires=fmrires, - dcmethod=dcmethod, - biascorrection=biascorrection, - n_cpus=args.n_cpus)), - ("fMRISurface", partial(run_generic_fMRI_surface_processsing, - path=args.output_dir, - subject="sub-%s" % subject_label, - fmriname=fmriname, - fmrires=fmrires, - n_cpus=args.n_cpus, - grayordinatesres=grayordinatesres, - lowresmesh=lowresmesh)) - ]) + path=args.output_dir, + subject="sub-%s"%subject_label, + fmriname=fmriname, + fmritcs=fmritcs, + fmriscout=fmriscout, + SEPhaseNeg=SEPhaseNeg, + SEPhasePos=SEPhasePos, + echospacing=echospacing, + unwarpdir=unwarpdir, + fmrires=fmrires, + dcmethod=dcmethod, + biascorrection=biascorrection, + n_cpus=args.n_cpus)), + ("fMRISurface", partial(run_generic_fMRI_surface_processsing, + path=args.output_dir, + subject="sub-%s"%subject_label, + fmriname=fmriname, + fmrires=fmrires, + n_cpus=args.n_cpus, + grayordinatesres=grayordinatesres, + lowresmesh=lowresmesh)) + ]) for stage, stage_func in func_stages_dict.iteritems(): if stage in args.stages: stage_func() @@ -449,7 +489,7 @@ def run_diffusion_processsing(**args): y = ''.join(y) dirnums.append(y) for dirnum in set(dirnums): - dwiname = "Diffusion" + "_acq-dir" + dirnum + "_" + session + dwiname = "Diffusion" + "_dir-" + dirnum + "_" + session + "_corr" for acq in acqs: if "AP" or "PA" in acqs: PEdir = 2 From 6037f366127035c5332a171f09caa3402d8dd2b3 Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Mon, 5 Dec 2016 09:51:21 -0800 Subject: [PATCH 05/14] uncomment diff function --- run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run.py b/run.py index e697c19..e8bd3af 100644 --- a/run.py +++ b/run.py @@ -167,7 +167,7 @@ def run_diffusion_processsing(**args): '--printcom=""' cmd = cmd.format(**args) print('\n',cmd, '\n') - # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def generate_level1_fsf(**args): print(args) From 31b460b3b8492686c13c499344c0db0567f5b01b Mon Sep 17 00:00:00 2001 From: yeunkim Date: Wed, 25 Jan 2017 16:25:16 -0800 Subject: [PATCH 06/14] bug fixed --- Dockerfile | 13 +++++++++++++ run.py | 20 ++++++++++---------- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4c0c87b..f5384fc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -64,6 +64,19 @@ RUN apt-get update && \ apt-get install -y nodejs && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* +# Install and compile dicm2niibatch +RUN apt-get update && \ + apt-get install unzip && \ + wget https://github.com/rordenlab/dcm2niix/archive/master.zip && \ + cd / && unzip /master.zip && \ + cd dcm2niix-master + +RUN apt-get update && \ + apt-get --assume-yes install pkg-config libyaml-cpp-dev libyaml-cpp0.5 cmake libboost-dev && \ + mkdir build && cd build && \ + cmake .. 
&& \ + make + RUN npm install -g bids-validator@0.19.2 COPY run.py /run.py diff --git a/run.py b/run.py index e8bd3af..2bc6724 100644 --- a/run.py +++ b/run.py @@ -66,7 +66,7 @@ def run_pre_freesurfer(**args): '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" ' + \ '--printcom=""' cmd = cmd.format(**args) - # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_freesurfer(**args): args.update(os.environ) @@ -90,8 +90,8 @@ def run_freesurfer(**args): shutil.copytree(os.path.join(os.environ["SUBJECTS_DIR"], "rh.EC_average"), os.path.join(args["subjectDIR"], "rh.EC_average")) - # run(cmd, cwd=args["path"], env={"NSLOTS": str(args["n_cpus"]), - # "OMP_NUM_THREADS": str(args["n_cpus"])}) + run(cmd, cwd=args["path"], env={"NSLOTS": str(args["n_cpus"]), + "OMP_NUM_THREADS": str(args["n_cpus"])}) def run_post_freesurfer(**args): args.update(os.environ) @@ -109,7 +109,7 @@ def run_post_freesurfer(**args): '--regname="FS" ' + \ '--printcom=""' cmd = cmd.format(**args) - # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_generic_fMRI_volume_processsing(**args): args.update(os.environ) @@ -135,7 +135,7 @@ def run_generic_fMRI_volume_processsing(**args): '--biascorrection={biascorrection} ' + \ '--mctype="MCFLIRT"' cmd = cmd.format(**args) - # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_generic_fMRI_surface_processsing(**args): print(args) @@ -150,7 +150,7 @@ def run_generic_fMRI_surface_processsing(**args): '--grayordinatesres="{grayordinatesres:s}" ' + \ '--regname="FS"' cmd = cmd.format(**args) - # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_diffusion_processsing(**args): # print(args) @@ -181,7 +181,7 @@ def generate_level1_fsf(**args): '--dir={dir}' cmd = cmd.format(**args) print('\n', cmd, '\n') - run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def generate_level2_fsf(**args): print(args) @@ -194,7 +194,7 @@ def generate_level2_fsf(**args): '--outdir={outdir} ' cmd = cmd.format(**args) print('\n', cmd, '\n') - run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_task_fmri_analysis(**args): print(args) @@ -218,7 +218,7 @@ def run_task_fmri_analysis(**args): '--printcom=""' cmd = cmd.format(**args) print('\n', cmd, '\n') - run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) + # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) __version__ = open('/version').read() @@ -397,7 +397,7 @@ def run_task_fmri_analysis(**args): type='bold', extensions=["nii.gz", "nii"])] for fmritcs in bolds: - fmriname = "_".join(bolds[0].split("sub-")[-1].split("_")[1:-1]).split(".")[0] + fmriname = "_".join(fmritcs.split("sub-")[-1].split("_")[1:-1]).split(".")[0] assert fmriname fmriscout = fmritcs.replace("_bold", "_sbref") From 6c256dbc253b6f4046041c5ceef12fc2f754bd52 Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Fri, 27 Jan 2017 14:34:30 -0800 Subject: [PATCH 07/14] changed Dockerfile (npm update bug fixed) --- Dockerfile | 81 
++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 55 insertions(+), 26 deletions(-) diff --git a/Dockerfile b/Dockerfile index f5384fc..7c5fe24 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,61 @@ -FROM bids/base_freesurfer +# Use Ubuntu 16.04 LTS +FROM ubuntu:xenial-20161213 + +## Install the validator +RUN apt-get update && \ + apt-get install -y curl && \ + curl -sL https://deb.nodesource.com/setup_6.x | bash - && \ + apt-get remove -y curl && \ + apt-get install -y nodejs && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +RUN npm install -g bids-validator@0.19.2 + +# Download FreeSurfer +RUN apt-get -y update \ + && apt-get install -y wget && \ + wget -qO- ftp://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/5.3.0-HCP/freesurfer-Linux-centos4_x86_64-stable-pub-v5.3.0-HCP.tar.gz | tar zxv -C /opt \ + --exclude='freesurfer/trctrain' \ + --exclude='freesurfer/subjects/fsaverage_sym' \ + --exclude='freesurfer/subjects/fsaverage3' \ + --exclude='freesurfer/subjects/fsaverage4' \ + --exclude='freesurfer/subjects/fsaverage5' \ + --exclude='freesurfer/subjects/fsaverage6' \ + --exclude='freesurfer/subjects/cvs_avg35' \ + --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \ + --exclude='freesurfer/subjects/bert' \ + --exclude='freesurfer/subjects/V1_average' \ + --exclude='freesurfer/average/mult-comp-cor' \ + --exclude='freesurfer/lib/cuda' \ + --exclude='freesurfer/lib/qt' && \ + apt-get install -y tcsh bc tar libgomp1 perl-modules curl && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +# Set up the environment +ENV OS Linux +ENV FS_OVERRIDE 0 +ENV FIX_VERTEX_AREA= +ENV SUBJECTS_DIR /opt/freesurfer/subjects +ENV FSF_OUTPUT_FORMAT nii.gz +ENV MNI_DIR /opt/freesurfer/mni +ENV LOCAL_DIR /opt/freesurfer/local +ENV FREESURFER_HOME /opt/freesurfer +ENV FSFAST_HOME /opt/freesurfer/fsfast +ENV MINC_BIN_DIR /opt/freesurfer/mni/bin +ENV MINC_LIB_DIR /opt/freesurfer/mni/lib +ENV MNI_DATAPATH /opt/freesurfer/mni/data +ENV FMRI_ANALYSIS_DIR /opt/freesurfer/fsfast +ENV PERL5LIB /opt/freesurfer/mni/lib/perl5/5.8.5 +ENV MNI_PERL5LIB /opt/freesurfer/mni/lib/perl5/5.8.5 +ENV PATH /opt/freesurfer/bin:/opt/freesurfer/fsfast/bin:/opt/freesurfer/tktools:/opt/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$PATH # Install FSL 5.0.9 RUN apt-get update && \ apt-get install -y --no-install-recommends curl && \ - curl -sSL http://neuro.debian.net/lists/trusty.us-ca.full >> /etc/apt/sources.list.d/neurodebian.sources.list && \ + curl -sSL http://neuro.debian.net/lists/xenial.us-ca.full >> /etc/apt/sources.list.d/neurodebian.sources.list && \ apt-key adv --recv-keys --keyserver hkp://pgp.mit.edu:80 0xA5D32F012649A5A9 && \ apt-get update && \ - apt-get install -y fsl-core=5.0.9-3~nd14.04+1 && \ + apt-get install -y fsl-core=5.0.9-1~nd+1+nd16.04+1 && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* # Configure environment @@ -23,7 +72,7 @@ ENV FSLOUTPUTTYPE=NIFTI_GZ RUN echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tXG41MTcyXG4gKkN2dW12RVYzelRmZ1xuRlM1Si8yYzFhZ2c0RVxuIiA+IC9vcHQvZnJlZXN1cmZlci9saWNlbnNlLnR4dAo=" | base64 -d | sh # Install Connectome Workbench -RUN apt-get update && apt-get -y install connectome-workbench=1.2.3-1~nd14.04+1 +RUN apt-get update && apt-get -y install connectome-workbench=1.2.3-1~nd16.04+1 ENV CARET7DIR=/usr/bin @@ -54,31 +103,11 @@ ENV HCPPIPEDIR_Global=${HCPPIPEDIR}/global/scripts ENV HCPPIPEDIR_tfMRIAnalysis=${HCPPIPEDIR}/TaskfMRIAnalysis/scripts ENV MSMBin=${HCPPIPEDIR}/MSMBinaries -RUN apt-get update && apt-get 
install -y --no-install-recommends python-pip python-six python-nibabel && \ +RUN apt-get update && apt-get install -y --no-install-recommends python-pip python-six python-nibabel python-setuptools && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -RUN pip install https://github.com/INCF/pybids/archive/158dac2062dc6b5a4ab2f92090108eedc3387575.zip +RUN pip install pybids==0.0.1 ENV PYTHONPATH="" -RUN apt-get update && \ - curl -sL https://deb.nodesource.com/setup_4.x | bash - && \ - apt-get install -y nodejs && \ - rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -# Install and compile dicm2niibatch -RUN apt-get update && \ - apt-get install unzip && \ - wget https://github.com/rordenlab/dcm2niix/archive/master.zip && \ - cd / && unzip /master.zip && \ - cd dcm2niix-master - -RUN apt-get update && \ - apt-get --assume-yes install pkg-config libyaml-cpp-dev libyaml-cpp0.5 cmake libboost-dev && \ - mkdir build && cd build && \ - cmake .. && \ - make - -RUN npm install -g bids-validator@0.19.2 - COPY run.py /run.py RUN chmod +x /run.py From 86926c2d47985a54acf1a549a43d0452fc19dc08 Mon Sep 17 00:00:00 2001 From: yeunkim Date: Wed, 22 Mar 2017 16:27:34 -0700 Subject: [PATCH 08/14] fixed repetitive for loops --- run.py | 80 ++++++++++++++++++++++++++++------------------------------ 1 file changed, 38 insertions(+), 42 deletions(-) diff --git a/run.py b/run.py index 2bc6724..92ff50f 100644 --- a/run.py +++ b/run.py @@ -490,49 +490,45 @@ def run_task_fmri_analysis(**args): dirnums.append(y) for dirnum in set(dirnums): dwiname = "Diffusion" + "_dir-" + dirnum + "_" + session + "_corr" - for acq in acqs: - if "AP" or "PA" in acqs: - PEdir = 2 - elif "LR" or "RL" in acqs: - PEdir = 1 + + diracqs = [x for x in acqs if dirnum in x] + if "AP" or "PA" in diracqs: + PEdir = 2 + elif "LR" or "RL" in diracqs: + PEdir = 1 + else: + RuntimeError("Acquisition direction not specified on dwi file") + pos = "EMPTY" + neg = "EMPTY" + gdcoeffs = "None" + dwis = layout.get(subject=subject_label, + type='dwi', acquisition=dirnum, run=session, + extensions=["nii.gz", "nii"]) + + assert len(dwis) <= 2 + for dwi in dwis: + dwi = dwi.filename + if "-" in layout.get_metadata(dwi)["PhaseEncodingDirection"]: + neg = dwi + # negData.append(neg) else: - RuntimeError("Acquisition direction not specified on dwi file") - pos = "EMPTY" - neg = "EMPTY" - gdcoeffs = "None" - dwis = layout.get(subject=subject_label, - type='dwi', acquisition=acq, run=session, - extensions=["nii.gz", "nii"]) - assert len(dwis) <= 2 - for dwi in dwis: - dwi = dwi.filename - if "-" in layout.get_metadata(dwi)["PhaseEncodingDirection"]: - neg = dwi - negData.append(neg) - else: - pos = dwi - posData.append(pos) - - # print(negData) - # print(posData) - # print(dwiname) - - for posfile, negfile in zip(posData, negData): - echospacing = layout.get_metadata(posfile)["EffectiveEchoSpacing"] * 1000 - dwi_stage_dict = OrderedDict([("DiffusionPreprocessing", partial(run_diffusion_processsing, - posData=posfile, - negData=negfile, - path=args.output_dir, - subject="sub-%s" % subject_label, - echospacing=echospacing, - PEdir=PEdir, - gdcoeffs="NONE", - dwiname=dwiname, - n_cpus=args.n_cpus))]) - - for stage, stage_func in dwi_stage_dict.iteritems(): - if stage in args.stages: - stage_func() + pos = dwi + # posData.append(pos) + + echospacing = layout.get_metadata(pos)["EffectiveEchoSpacing"] * 1000 + dwi_stage_dict = OrderedDict([("DiffusionPreprocessing", partial(run_diffusion_processsing, + posData=pos, + negData=neg, + path=args.output_dir, + subject="sub-%s" 
% subject_label, + echospacing=echospacing, + PEdir=PEdir, + gdcoeffs="NONE", + dwiname=dwiname, + n_cpus=args.n_cpus))]) + for stage, stage_func in dwi_stage_dict.iteritems(): + if stage in args.stages: + stage_func() From 15ed8371b33f4e5ce6914f998fa1a3759e3c438c Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Mon, 10 Apr 2017 23:29:02 -0700 Subject: [PATCH 09/14] Remove .idea from repo From 0b410595576822649b602d353e51da81572df688 Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Sun, 16 Apr 2017 00:19:58 -0700 Subject: [PATCH 10/14] test drive --- run.py | 135 ++++++++++++++++++++------------------------------------- 1 file changed, 48 insertions(+), 87 deletions(-) diff --git a/run.py b/run.py index 92ff50f..16fa68e 100644 --- a/run.py +++ b/run.py @@ -138,7 +138,7 @@ def run_generic_fMRI_volume_processsing(**args): run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) def run_generic_fMRI_surface_processsing(**args): - print(args) + # print(args) args.update(os.environ) cmd = '{HCPPIPEDIR}/fMRISurface/GenericfMRISurfaceProcessingPipeline.sh ' + \ '--path={path} ' + \ @@ -169,56 +169,6 @@ def run_diffusion_processsing(**args): print('\n',cmd, '\n') run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) -def generate_level1_fsf(**args): - print(args) - args.update(os.environ) - cmd = '{HCPPIPEDIR}/Examples/Scripts/generate_level_1_fsf_dev.sh ' + \ - '--studyfolder={studyfolder} ' + \ - '--subject={subject} ' + \ - '--taskname={taskname} ' + \ - '--templatedir={HCPPIPEDIR}/{templatedir} ' + \ - '--outdir={outdir} ' + \ - '--dir={dir}' - cmd = cmd.format(**args) - print('\n', cmd, '\n') - # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) - -def generate_level2_fsf(**args): - print(args) - args.update(os.environ) - cmd = '{HCPPIPEDIR}/Examples/Scripts/generate_level_2_fsf_dev.sh ' + \ - '--studyfolder={studyfolder} ' + \ - '--subject={subject} ' + \ - '--taskname={taskname} ' + \ - '--templatedir={HCPPIPEDIR}/{templatedir} ' + \ - '--outdir={outdir} ' - cmd = cmd.format(**args) - print('\n', cmd, '\n') - # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) - -def run_task_fmri_analysis(**args): - print(args) - args.update(os.environ) - cmd = '{HCPPIPEDIR}/TaskfMRIAnalysis/TaskfMRIAnalysis.sh ' + \ - '--path={path} ' + \ - '--subject={subject} ' + \ - '--lvl1tasks={lvl1tasks} ' + \ - '--lvl1fsfs={lvl1fsfs} ' + \ - '--lvl2task={lvl2task} ' + \ - '--lvl2fsf={lvl2fsf} ' + \ - '--lowresmesh={lowresmesh} ' + \ - '--grayordinatesres="{grayordinatesres:s}" ' + \ - '--confound={confound} ' + \ - '--finalsmoothingFWHM={finalsmoothingFWHM} ' + \ - '--temporalfilter={temporalfilter} ' + \ - '--vba={vba} ' + \ - '--regname={regname} ' + \ - '--parcellation={parcellation} ' + \ - '--parcellationfile={parcellationfile} ' + \ - '--printcom=""' - cmd = cmd.format(**args) - print('\n', cmd, '\n') - # run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) __version__ = open('/version').read() @@ -464,45 +414,56 @@ def run_task_fmri_analysis(**args): if stage in args.stages: stage_func() - # dwis = layout.get(subject=subject_label, type='dwi', - # extensions=["nii.gz", "nii"]) - - # print(dwis) - posData = [] negData = [] PEdir = "None" dwiname = "Diffusion" dirnums = [] + dmris = [f.filename for f in layout.get(subject=subject_label, + type='dwi', + extensions=["nii.gz", "nii"])] + + bvals = [f.filename for f in layout.get(subject=subject_label, + type='dwi', + extensions=["bval"])] + numruns = 
set(layout.get(target='run', return_type='id', subject=subject_label, type='dwi', extensions=["nii.gz", "nii"])) + for bval in bvals: + with open(bval) as f: + bvalues = [bvalue for line in f for bvalue in line.split()] + dirnums.append(len(bvalues)-1) + + for session in numruns: - acqs = set(layout.get(target='acquisition', return_type='id', - subject=subject_label, type='dwi', - extensions=["nii.gz", "nii"])) - for acq in acqs: - y = [int(s) for s in acq[0:len(acq)] if s.isdigit()] - y = [str(s) for s in y] - y = ''.join(y) - dirnums.append(y) + acqs = set([layout.get_metadata(f)["PhaseEncodingDirection"] for f in dmris]) + # acqs = set(layout.get(target='acquisition', return_type='id', + # subject=subject_label, type='dwi', + # extensions=["nii.gz", "nii"])) + + # for acq in acqs: + # y = [int(s) for s in acq[0:len(acq)] if s.isdigit()] + # y = [str(s) for s in y] + # y = ''.join(y) + # dirnums.append(y) for dirnum in set(dirnums): - dwiname = "Diffusion" + "_dir-" + dirnum + "_" + session + "_corr" + dwiname = "Diffusion" + "_dir-" + str(dirnum) + "_" + session + "_corr" - diracqs = [x for x in acqs if dirnum in x] - if "AP" or "PA" in diracqs: + # diracqs = [x for x in acqs if dirnum in x] + if "j" in acqs: PEdir = 2 - elif "LR" or "RL" in diracqs: + elif "i" in acqs: PEdir = 1 else: - RuntimeError("Acquisition direction not specified on dwi file") + RuntimeError("Phase encoding direction not specified for diffusion data.") pos = "EMPTY" neg = "EMPTY" gdcoeffs = "None" dwis = layout.get(subject=subject_label, - type='dwi', acquisition=dirnum, run=session, + type='dwi', acquisition=str(dirnum), run=session, extensions=["nii.gz", "nii"]) assert len(dwis) <= 2 @@ -510,25 +471,25 @@ def run_task_fmri_analysis(**args): dwi = dwi.filename if "-" in layout.get_metadata(dwi)["PhaseEncodingDirection"]: neg = dwi - # negData.append(neg) else: pos = dwi - # posData.append(pos) - - echospacing = layout.get_metadata(pos)["EffectiveEchoSpacing"] * 1000 - dwi_stage_dict = OrderedDict([("DiffusionPreprocessing", partial(run_diffusion_processsing, - posData=pos, - negData=neg, - path=args.output_dir, - subject="sub-%s" % subject_label, - echospacing=echospacing, - PEdir=PEdir, - gdcoeffs="NONE", - dwiname=dwiname, - n_cpus=args.n_cpus))]) - for stage, stage_func in dwi_stage_dict.iteritems(): - if stage in args.stages: - stage_func() + try: + echospacing = layout.get_metadata(pos)["EffectiveEchoSpacing"] * 1000 + dwi_stage_dict = OrderedDict([("DiffusionPreprocessing", partial(run_diffusion_processsing, + posData=pos, + negData=neg, + path=args.output_dir, + subject="sub-%s" % subject_label, + echospacing=echospacing, + PEdir=PEdir, + gdcoeffs="NONE", + dwiname=dwiname, + n_cpus=args.n_cpus))]) + for stage, stage_func in dwi_stage_dict.iteritems(): + if stage in args.stages: + stage_func() + except: + print("You may be missing a diffusion data set with positive phase encoding direction.") From 6aa43edc3eb221fb0847b966a4791c108d00b14d Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Sun, 16 Apr 2017 00:22:04 -0700 Subject: [PATCH 11/14] test drive From 0eae829d328fb65bc34b82a036f1e608ca3390e3 Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Thu, 20 Apr 2017 10:36:07 -0700 Subject: [PATCH 12/14] uses metadata for processing --- run.py | 136 ++++++++++++++++++++++++++++----------------------------- 1 file changed, 67 insertions(+), 69 deletions(-) diff --git a/run.py b/run.py index 16fa68e..7b11d76 100644 --- a/run.py +++ b/run.py @@ -355,7 +355,7 @@ def run_diffusion_processsing(**args): fmriscout = "NONE" 
fieldmap_set = layout.get_fieldmap(fmritcs) - print(fieldmap_set) + # print(fieldmap_set) if fieldmap_set and fieldmap_set["type"] == "epi": SEPhaseNeg = None SEPhasePos = None @@ -419,77 +419,75 @@ def run_diffusion_processsing(**args): PEdir = "None" dwiname = "Diffusion" dirnums = [] - - dmris = [f.filename for f in layout.get(subject=subject_label, - type='dwi', - extensions=["nii.gz", "nii"])] - - bvals = [f.filename for f in layout.get(subject=subject_label, - type='dwi', - extensions=["bval"])] + onerun = False numruns = set(layout.get(target='run', return_type='id', subject=subject_label, type='dwi', extensions=["nii.gz", "nii"])) - - for bval in bvals: - with open(bval) as f: - bvalues = [bvalue for line in f for bvalue in line.split()] - dirnums.append(len(bvalues)-1) - - - for session in numruns: - acqs = set([layout.get_metadata(f)["PhaseEncodingDirection"] for f in dmris]) - # acqs = set(layout.get(target='acquisition', return_type='id', - # subject=subject_label, type='dwi', - # extensions=["nii.gz", "nii"])) - - # for acq in acqs: - # y = [int(s) for s in acq[0:len(acq)] if s.isdigit()] - # y = [str(s) for s in y] - # y = ''.join(y) - # dirnums.append(y) - for dirnum in set(dirnums): - dwiname = "Diffusion" + "_dir-" + str(dirnum) + "_" + session + "_corr" - - # diracqs = [x for x in acqs if dirnum in x] - if "j" in acqs: - PEdir = 2 - elif "i" in acqs: - PEdir = 1 + # accounts for multiple runs, number of directions, and phase encoding directions + if not numruns: + onerun= True + numruns = {'run-01'} + if numruns: + for session in numruns: + if not onerun: + bvals = [f.filename for f in layout.get(subject=subject_label, + type='dwi', run=session, + extensions=["bval"])] else: - RuntimeError("Phase encoding direction not specified for diffusion data.") - pos = "EMPTY" - neg = "EMPTY" - gdcoeffs = "None" - dwis = layout.get(subject=subject_label, - type='dwi', acquisition=str(dirnum), run=session, - extensions=["nii.gz", "nii"]) - - assert len(dwis) <= 2 - for dwi in dwis: - dwi = dwi.filename - if "-" in layout.get_metadata(dwi)["PhaseEncodingDirection"]: - neg = dwi - else: - pos = dwi - - try: - echospacing = layout.get_metadata(pos)["EffectiveEchoSpacing"] * 1000 - dwi_stage_dict = OrderedDict([("DiffusionPreprocessing", partial(run_diffusion_processsing, - posData=pos, - negData=neg, - path=args.output_dir, - subject="sub-%s" % subject_label, - echospacing=echospacing, - PEdir=PEdir, - gdcoeffs="NONE", - dwiname=dwiname, - n_cpus=args.n_cpus))]) - for stage, stage_func in dwi_stage_dict.iteritems(): - if stage in args.stages: - stage_func() - except: - print("You may be missing a diffusion data set with positive phase encoding direction.") - + bvals = [f.filename for f in layout.get(subject=subject_label, + type='dwi', extensions=["bval"])] + dwi_dict = {'bvalFile':[], 'bval':[], 'dwiFile':[], 'direction':[]} + for bvalfile in bvals: + with open(bvalfile) as f: # get number of directions + bvalues = [bvalue for line in f for bvalue in line.split()] + dwi_dict['bvalFile'].append(bvalfile) + dwi_dict['bval'].append(len(bvalues) - 1) + dwiFile = glob(os.path.join(os.path.dirname(bvalfile),'{0}.nii*'.format(os.path.basename(bvalfile).split('.')[0]))) # ensures bval file has same name as dwi file + assert len(dwiFile) == 1 + dwi_dict['dwiFile'].append(dwiFile[0]) + dwi_dict['direction'].append(layout.get_metadata(dwiFile[0])["PhaseEncodingDirection"][0]) + + # check if length of lists in dictionary are the same + n = len(dwi_dict['bvalFile']) + assert all(len(dwi_dict[k]) 
for k,v in dwi_dict.items()) + + for dirnum in set(dwi_dict['bval']): + idxs = { i for k,v in dwi_dict.iteritems() for i in range(0,len(dwi_dict['bval'])) if v[i] == dirnum } + PEdirNums = set([dwi_dict['direction'][i] for i in idxs]) + for PEdirNum in PEdirNums: + dwis = [ dwi_dict['dwiFile'][i] for i in idxs if dwi_dict['direction'][i] == PEdirNum ] + assert len(dwis) <= 2 + dwiname = "Diffusion" + "_dir-" + str(dirnum) + "_" + session + "_corr_" + str(PEdirNum) + if "j" in PEdirNum: + PEdir = 2 + elif "i" in PEdirNum: + PEdir = 1 + else: + RuntimeError("Phase encoding direction not specified for diffusion data.") + pos = "EMPTY" + neg = "EMPTY" + gdcoeffs = "None" + for dwi in dwis: + if "-" in layout.get_metadata(dwi)["PhaseEncodingDirection"]: + neg = dwi + else: + pos = dwi + try: + echospacing = layout.get_metadata(pos)["EffectiveEchoSpacing"] * 1000 + dwi_stage_dict = OrderedDict([("DiffusionPreprocessing", partial(run_diffusion_processsing, + posData=pos, + negData=neg, + path=args.output_dir, + subject="sub-%s" % subject_label, + echospacing=echospacing, + PEdir=PEdir, + gdcoeffs="NONE", + dwiname=dwiname, + n_cpus=args.n_cpus))]) + for stage, stage_func in dwi_stage_dict.iteritems(): + if stage in args.stages: + stage_func() + except: + print("You may have missing diffusion data in the positive phase encoding direction.") From 2317eefced86629b0096b1aeb493b557f5481673 Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Wed, 3 May 2017 15:55:09 -0700 Subject: [PATCH 13/14] added Chris's suggestions and added more comments --- Dockerfile | 2 +- run.py | 22 ++++++++++++++-------- 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/Dockerfile b/Dockerfile index 3ca9f6a..2fdf2b5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -52,7 +52,7 @@ ENV PATH /opt/freesurfer/bin:/opt/freesurfer/fsfast/bin:/opt/freesurfer/tktools: # Install FSL 5.0.9 RUN apt-get update && \ apt-get install -y --no-install-recommends curl && \ - curl -sSL http://neuro.debian.net/lists/xenial.us-ca.full >> /etc/apt/sources.list.d/neurodebian.sources.list && \ + curl -sSL http://neuro.debian.net/lists/trusty.us-ca.full >> /etc/apt/sources.list.d/neurodebian.sources.list && \ apt-key adv --recv-keys --keyserver hkp://pgp.mit.edu:80 0xA5D32F012649A5A9 && \ apt-get update && \ apt-get install -y fsl-core=5.0.9-1~nd+1+nd16.04+1 && \ diff --git a/run.py b/run.py index 3034598..7d2180c 100644 --- a/run.py +++ b/run.py @@ -194,7 +194,7 @@ def run_diffusion_processsing(**args): 'fMRISurface', 'DiffusionPreprocessing'], default=['PreFreeSurfer', 'FreeSurfer', 'PostFreeSurfer', 'fMRIVolume', 'fMRISurface', - 'DiffusionPreprocessing', 'TaskfMRIAnalysis']) + 'DiffusionPreprocessing']) parser.add_argument('--license_key', help='FreeSurfer license key - letters and numbers after "*" in the email you received after registration. 
To register (for free) visit https://surfer.nmr.mgh.harvard.edu/registration.html', required=True) parser.add_argument('-v', '--version', action='version', @@ -426,18 +426,21 @@ def run_diffusion_processsing(**args): onerun= True numruns = {'run-01'} if numruns: - for session in numruns: + for numrun in numruns: if not onerun: bvals = [f.filename for f in layout.get(subject=subject_label, - type='dwi', run=session, + type='dwi', run=numrun, extensions=["bval"])] else: bvals = [f.filename for f in layout.get(subject=subject_label, type='dwi', extensions=["bval"])] + ## find number of directions by reading bval files, then create dictionary with corresponding + # bval file name, number of directions, dwi image file name, and phase encoding direction (i or j). dwi_dict = {'bvalFile':[], 'bval':[], 'dwiFile':[], 'direction':[]} - for bvalfile in bvals: - with open(bvalfile) as f: # get number of directions + for bvalfile in bvals: # fine number of directions + with open(bvalfile) as f: bvalues = [bvalue for line in f for bvalue in line.split()] + # fill in the rest of dictionary dwi_dict['bvalFile'].append(bvalfile) dwi_dict['bval'].append(len(bvalues) - 1) dwiFile = glob(os.path.join(os.path.dirname(bvalfile),'{0}.nii*'.format(os.path.basename(bvalfile).split('.')[0]))) # ensures bval file has same name as dwi file @@ -447,15 +450,18 @@ def run_diffusion_processsing(**args): # check if length of lists in dictionary are the same n = len(dwi_dict['bvalFile']) - assert all(len(dwi_dict[k]) for k,v in dwi_dict.items()) + assert all(len(dwi_dict[k]) == n for k,v in dwi_dict.items()) for dirnum in set(dwi_dict['bval']): + ## the following statement extracts index values in dwi_dict['bval'] if the value matches + # "dirnum", which is the number of directions (i.e. 98 or 99). These index values are used + # to find the corresponding PE directions, dwi file names, etc. in the dictionary idxs = { i for k,v in dwi_dict.iteritems() for i in range(0,len(dwi_dict['bval'])) if v[i] == dirnum } PEdirNums = set([dwi_dict['direction'][i] for i in idxs]) for PEdirNum in PEdirNums: dwis = [ dwi_dict['dwiFile'][i] for i in idxs if dwi_dict['direction'][i] == PEdirNum ] assert len(dwis) <= 2 - dwiname = "Diffusion" + "_dir-" + str(dirnum) + "_" + session + "_corr_" + str(PEdirNum) + dwiname = "Diffusion" + "_dir-" + str(dirnum) + "_" + numrun + "_corr_" + str(PEdirNum) if "j" in PEdirNum: PEdir = 2 elif "i" in PEdirNum: @@ -485,6 +491,6 @@ def run_diffusion_processsing(**args): for stage, stage_func in dwi_stage_dict.iteritems(): if stage in args.stages: stage_func() - except: + except NameError: print("You may have missing diffusion data in the positive phase encoding direction.") From 4f6f47cb9d748300b15b352f1ade5ecf4c0c38ae Mon Sep 17 00:00:00 2001 From: Yeun Kim Date: Wed, 3 May 2017 15:57:03 -0700 Subject: [PATCH 14/14] misspelling in comment --- run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run.py b/run.py index 7d2180c..eaa82a1 100644 --- a/run.py +++ b/run.py @@ -437,7 +437,7 @@ def run_diffusion_processsing(**args): ## find number of directions by reading bval files, then create dictionary with corresponding # bval file name, number of directions, dwi image file name, and phase encoding direction (i or j). 
dwi_dict = {'bvalFile':[], 'bval':[], 'dwiFile':[], 'direction':[]} - for bvalfile in bvals: # fine number of directions + for bvalfile in bvals: # find number of directions with open(bvalfile) as f: bvalues = [bvalue for line in f for bvalue in line.split()] # fill in the rest of dictionary
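
For reference, the DWI handling that patches 08 through 14 iterate on boils down to one grouping step: count diffusion directions from each .bval file, read PhaseEncodingDirection from the image metadata, and pair opposite-polarity acquisitions before calling DiffusionPreprocessing. The sketch below illustrates just that step under simplifying assumptions: the sidecar-JSON lookup, the .bval/.nii.gz stem matching, and the example paths are illustrative stand-ins, since run.py itself resolves files and metadata through the pybids layout object.

import json
from collections import defaultdict

def count_directions(bval_path):
    # Number of directions = number of b-values minus the b=0 volume,
    # mirroring how run.py reads the .bval file.
    with open(bval_path) as f:
        bvalues = [v for line in f for v in line.split()]
    return len(bvalues) - 1

def phase_encoding(dwi_path):
    # Read PhaseEncodingDirection (e.g. "j" or "j-") from a sidecar JSON
    # assumed to sit next to the NIfTI; run.py uses layout.get_metadata().
    sidecar = dwi_path.replace(".nii.gz", ".json").replace(".nii", ".json")
    with open(sidecar) as f:
        return json.load(f)["PhaseEncodingDirection"]

def group_dwis(bval_paths):
    # Group DWI volumes by (direction count, PE axis) and split each group
    # into a positive/negative pair, the shape DiffusionPreprocessing expects.
    groups = defaultdict(lambda: {"pos": "EMPTY", "neg": "EMPTY"})
    for bval in bval_paths:
        dwi = bval.replace(".bval", ".nii.gz")        # assumes matching stems
        direction = phase_encoding(dwi)
        key = (count_directions(bval), direction[0])  # axis "i" or "j"
        groups[key]["neg" if "-" in direction else "pos"] = dwi
    return groups

# Usage with hypothetical paths:
#   pairs = group_dwis(["sub-01/dwi/sub-01_acq-98_dwi.bval",
#                       "sub-01/dwi/sub-01_acq-99_dwi.bval"])
#   for (ndirs, axis), pair in pairs.items():
#       pedir = 2 if axis == "j" else 1   # HCP convention: 1 = LR/RL, 2 = AP/PA
#       print(ndirs, pedir, pair["pos"], pair["neg"])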