diff --git a/rabies/analysis_pkg/main_wf.py b/rabies/analysis_pkg/main_wf.py
index 547037e..ea4d555 100644
--- a/rabies/analysis_pkg/main_wf.py
+++ b/rabies/analysis_pkg/main_wf.py
@@ -17,8 +17,18 @@ def init_main_analysis_wf(preprocess_opts, cr_opts, analysis_opts):
 
     split_dict, split_name_list, target_list = read_confound_workflow(conf_output, nativespace=cr_opts.nativespace_analysis)
 
-    # update split_name according to the --scan_list option
-    split_name_list = get_iterable_scan_list(analysis_opts.scan_list, split_name_list)
+    if len(split_name_list)==0:
+        raise ValueError(f"""
+            No outputs were found from the confound correction stage.
+            All scans may have been removed for not meeting the minimum_timepoint threshold
+            when applying --frame_censoring. Outputs will be named empty.nii.gz if this is
+            the case.
+            """)
+
+    # filter inclusion/exclusion lists
+    from rabies.utils import filter_scan_inclusion, filter_scan_exclusion
+    split_name_list = filter_scan_inclusion(analysis_opts.inclusion_ids, split_name_list)
+    split_name_list = filter_scan_exclusion(analysis_opts.exclusion_ids, split_name_list)
 
     # setting up iterables from the BOLD scan splits
     main_split = pe.Node(niu.IdentityInterface(fields=['split_name']),
@@ -310,6 +320,8 @@ def load_sub_input_dict(maps_dict, bold_file, CR_data_dict, VE_file, STD_file, C
 
 def read_confound_workflow(conf_output, nativespace=False):
 
+    from nipype import logging
+    log = logging.getLogger('nipype.workflow')
 
     conf_workflow_file = f'{conf_output}/rabies_confound_correction_workflow.pkl'
@@ -358,15 +370,26 @@ def read_confound_workflow(conf_output, nativespace=False):
 
     # don't include scans that were removed during confound correction
     corrected_split_name=[]
+    remove_list = []
     import pathlib
     for name in split_name:
         filename = pathlib.Path(split_dict[name]['cleaned_path']).name
         if 'empty' in filename:
+            remove_list.append(name)
             del split_dict[name]
         else:
             corrected_split_name.append(name)
     split_name = corrected_split_name
 
+    if len(remove_list)>0:
+        scan_list_str = ''
+        for name in remove_list:
+            scan_list_str += f'\n - {name}'
+        log.warning(f"""
+            The following scans were not included for analysis as the file was empty: {scan_list_str}
+            This is likely due to not meeting the minimum_timepoint threshold from --frame_censoring.
+            """)
+
     return split_dict, split_name, target_list
diff --git a/rabies/confound_correction_pkg/main_wf.py b/rabies/confound_correction_pkg/main_wf.py
index 961842c..78608bc 100644
--- a/rabies/confound_correction_pkg/main_wf.py
+++ b/rabies/confound_correction_pkg/main_wf.py
@@ -23,6 +23,11 @@ def init_main_confound_correction_wf(preprocess_opts, cr_opts):
     else:
         split_dict, split_name, target_list = read_preproc_workflow(preproc_output, nativespace=cr_opts.nativespace_analysis)
 
+    # filter inclusion/exclusion lists
+    from rabies.utils import filter_scan_inclusion, filter_scan_exclusion
+    split_name = filter_scan_inclusion(cr_opts.inclusion_ids, split_name)
+    split_name = filter_scan_exclusion(cr_opts.exclusion_ids, split_name)
+
     # setting up iterables from the BOLD scan splits
     main_split = pe.Node(niu.IdentityInterface(fields=['split_name']),
                          name="main_split")
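A quick sketch of the screening logic added to `read_confound_workflow` above: scans whose cleaned output was written as `empty.nii.gz` are dropped from the iteration list and reported in a warning. The paths and dictionary below are hypothetical stand-ins for the real `split_dict` entries.

```python
import pathlib

# hypothetical cleaned_path entries, mimicking the split_dict structure
split_dict = {
    'sub-token1_bold': {'cleaned_path': '/outputs/cleaned_timeseries/sub-token1_bold.nii.gz'},
    'sub-token2_bold': {'cleaned_path': '/outputs/cleaned_timeseries/empty.nii.gz'},
}

kept, removed = [], []
for name, entry in split_dict.items():
    # an 'empty' filename marks a scan censored below the minimum_timepoint threshold
    if 'empty' in pathlib.Path(entry['cleaned_path']).name:
        removed.append(name)
    else:
        kept.append(name)
print(kept, removed)  # ['sub-token1_bold'] ['sub-token2_bold']
```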
diff --git a/rabies/parser.py b/rabies/parser.py
index a287716..d080b4b 100644
--- a/rabies/parser.py
+++ b/rabies/parser.py
@@ -80,6 +80,33 @@ def get_parser():
         description=
             "Options for parallel execution and memory management."
         )
+    g_execution.add_argument(
+        '--inclusion_ids', type=str,
+        nargs="*",  # 0 or more values expected => creates a list
+        default=['all'],
+        help=
+            "Define a list of BOLD scans to include, i.e. run the pipeline on a subset of the data. \n"
+            "To do so, provide the full path to the corresponding BOLD file in the input BIDS folder. The list \n"
+            "of scans can be specified manually as a list of file names '--inclusion_ids scan1.nii.gz \n"
+            "scan2.nii.gz ...', or the files can be embedded into a .txt file with one filename per row.\n"
+            "By default, 'all' keeps all the scans found in the input BIDS directory or from the previous \n"
+            "processing step. This can be provided at any processing stage.\n"
+            "***NOTE: do not enter this parameter right before the processing stage (preprocess, etc...), as this will cause \n"
+            "parsing errors. Instead, provide another parameter after --inclusion_ids (e.g. --verbose or -p). \n"
+            "(default: %(default)s)\n"
+            "\n"
+        )
+    g_execution.add_argument(
+        '--exclusion_ids', type=str,
+        nargs="*",  # 0 or more values expected => creates a list
+        default=['none'],
+        help=
+            "Instead of providing a list of scans to include, this argument provides a list of scans to exclude (while \n"
+            "keeping all other scans). This argument follows the same syntax rules as --inclusion_ids. --exclusion_ids \n"
+            "and --inclusion_ids cannot be used simultaneously. \n"
+            "(default: %(default)s)\n"
+            "\n"
+        )
     g_execution.add_argument(
         "-p", "--plugin", default='Linear',
         choices=['Linear', 'MultiProc', 'SGE', 'SGEGraph',
@@ -129,6 +156,14 @@ def get_parser():
         "(default: %(default)s)\n"
         "\n"
         )
+    g_execution.add_argument(
+        "-f", "--force", dest='force', action='store_true',
+        help=
+            "The pipeline will not stop if previous outputs are encountered. \n"
+            "Previous outputs will be overwritten.\n"
+            "(default: %(default)s)\n"
+            "\n"
+        )
 
     ####Preprocessing
@@ -958,8 +993,11 @@ def get_parser():
     return parser
 
 
-def read_parser(parser):
-    opts = parser.parse_args()
+def read_parser(parser, args):
+    if args is None:
+        opts = parser.parse_args()
+    else:
+        opts = parser.parse_args(args)
 
     if opts.rabies_stage == 'preprocess':
         opts.anat_inho_cor = parse_argument(opt=opts.anat_inho_cor,
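The ***NOTE in the help text above stems from how argparse resolves `nargs="*"`: the option greedily consumes every following non-option token, including the positional processing stage. A minimal repro with a hypothetical stand-in parser (not the full RABIES CLI):

```python
import argparse

# stand-in for the RABIES CLI, reduced to the relevant pieces
parser = argparse.ArgumentParser()
parser.add_argument('--inclusion_ids', type=str, nargs="*", default=['all'])
parser.add_argument('--verbose', type=int, default=1)
parser.add_argument('rabies_stage')

# This fails: 'preprocess' is swallowed into --inclusion_ids, so the
# positional rabies_stage is reported as missing.
#   parser.parse_args(['--inclusion_ids', 'scan1.nii.gz', 'preprocess'])

# This works: a following option terminates the greedy list.
opts = parser.parse_args(['--inclusion_ids', 'scan1.nii.gz', '--verbose', '1', 'preprocess'])
print(opts.inclusion_ids, opts.rabies_stage)  # ['scan1.nii.gz'] preprocess
```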
diff --git a/rabies/preprocess_pkg/main_wf.py b/rabies/preprocess_pkg/main_wf.py
index 88d769f..3891dcc 100644
--- a/rabies/preprocess_pkg/main_wf.py
+++ b/rabies/preprocess_pkg/main_wf.py
@@ -135,7 +135,7 @@ def init_main_wf(data_dir_path, output_folder, opts, name='main_wf'):
     bids.config.set_option('extension_initial_dot', True)
     layout = bids.layout.BIDSLayout(data_dir_path, validate=False)
     split_name, scan_info, run_iter, scan_list, bold_scan_list = prep_bids_iter(
-        layout, opts.bold_only)
+        layout, opts.bold_only, inclusion_list=opts.inclusion_ids, exclusion_list=opts.exclusion_ids)
 
     # setting up all iterables
     main_split = pe.Node(niu.IdentityInterface(fields=['split_name', 'scan_info']),
diff --git a/rabies/preprocess_pkg/utils.py b/rabies/preprocess_pkg/utils.py
index 02a8031..c4b4d8f 100644
--- a/rabies/preprocess_pkg/utils.py
+++ b/rabies/preprocess_pkg/utils.py
@@ -8,7 +8,7 @@
 )
 from rabies.utils import run_command
 
-def prep_bids_iter(layout, bold_only=False):
+def prep_bids_iter(layout, bold_only=False, inclusion_list=['all'], exclusion_list=['none']):
     '''
     This function takes as input a BIDSLayout, and generates iteration lists for
     managing the workflow's iterables depending on whether --bold_only is
@@ -38,6 +38,19 @@
         raise ValueError(
            "No functional file with the suffix 'bold' were found among the BIDS directory.")
 
+    # filter inclusion/exclusion lists
+    from rabies.utils import filter_scan_inclusion, filter_scan_exclusion
+    boldname_list=[pathlib.Path(bold.filename).name.rsplit(".nii")[0] for bold in bold_bids]
+    updated_split_name = filter_scan_inclusion(inclusion_list, boldname_list)
+    updated_split_name = filter_scan_exclusion(exclusion_list, updated_split_name)
+
+    filtered_bold_bids=[]
+    for name in updated_split_name:
+        for bold in bold_bids:
+            if name in bold.filename:
+                filtered_bold_bids.append(bold)
+    bold_bids = filtered_bold_bids
+
     bold_dict = {}
     for bold in bold_bids:
         sub = bold.get_entities()['subject']
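For reference, the scan IDs matched by the filters are the BOLD filenames with their extension stripped, as computed for `boldname_list` above. The example filename is hypothetical:

```python
import pathlib

# the ID is everything before the .nii/.nii.gz extension
filename = 'sub-token1_ses-1_bold.nii.gz'
scan_id = pathlib.Path(filename).name.rsplit(".nii")[0]
print(scan_id)  # sub-token1_ses-1_bold
```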
diff --git a/rabies/run_main.py b/rabies/run_main.py
index 494b254..7c83b78 100644
--- a/rabies/run_main.py
+++ b/rabies/run_main.py
@@ -11,10 +11,10 @@
 rabies_path = os.environ['HOME']+'/.local/share/rabies'
 
 
-def execute_workflow():
+def execute_workflow(args=None):
     # generates the parser CLI and execute the workflow based on specified parameters.
     parser = get_parser()
-    opts = read_parser(parser)
+    opts = read_parser(parser, args)
 
     try:
         # convert the output path to absolute if not already the case
         opts.output_dir = os.path.abspath(str(opts.output_dir))
@@ -40,6 +40,13 @@ def execute_workflow():
         args += input
     log.info(args)
 
+    # inclusion/exclusion lists are incompatible parameters
+    if (not opts.inclusion_ids[0]=='all') and (not opts.exclusion_ids[0]=='none'):
+        raise ValueError(f"""
+            Either an inclusion list (--inclusion_ids) or exclusion list (--exclusion_ids)
+            can be provided, not both.
+            """)
+
     if opts.rabies_stage == 'preprocess':
         workflow = preprocess(opts, log)
     elif opts.rabies_stage == 'confound_correction':
@@ -72,12 +79,13 @@ def execute_workflow():
 
 def prep_logging(opts, output_folder):
     cli_file = f'{output_folder}/rabies_{opts.rabies_stage}.pkl'
-    if os.path.isfile(cli_file):
+    if os.path.isfile(cli_file) and not opts.force:
         raise ValueError(f"""
             A previous run was indicated by the presence of {cli_file}.
             This can lead to inconsistencies between previous outputs and the log files.
-            To prevent this, you are required to manually remove {cli_file}, and we
-            recommend also removing previous datasinks from the {opts.rabies_stage} RABIES step.
+            To prevent this, we recommend removing previous datasinks from the {opts.rabies_stage}
+            RABIES stage. To continue with your execution, the {cli_file} file must be
+            removed (use --force to automatically do so).
             """)
 
     # remove old versions of the log if already existing
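With the new `args=None` parameter, the same entry point used by the console script can be driven programmatically, which `scripts/debug_workflow.py` below relies on. A minimal sketch (paths are hypothetical, and this does launch a full run):

```python
from rabies.run_main import execute_workflow

# args=None keeps the CLI behavior (sys.argv is parsed); a list of strings
# runs the identical code path in-process. Note that --inclusion_ids and
# --exclusion_ids cannot both be set away from their defaults.
execute_workflow(args=[
    '--exclusion_ids', '/path/to/bids/sub-01_bold.nii.gz',  # hypothetical scan
    '-f',  # proceed despite previous outputs (overwrites the .pkl record)
    'preprocess', '/path/to/bids', '/path/to/outputs',
])
```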
diff --git a/rabies/utils.py b/rabies/utils.py
index bff9272..5190829 100644
--- a/rabies/utils.py
+++ b/rabies/utils.py
@@ -389,6 +389,71 @@ def flatten_list(l):
     return l
 
 
+def filter_scan_exclusion(exclusion_list, split_name):
+    # the function removes a list of scan IDs from split_name
+
+    # exclusion_list: the input provided by the user
+    # split_name: a list of all scan IDs that were found
+
+    import numpy as np
+    import pandas as pd
+    if os.path.isfile(os.path.abspath(exclusion_list[0])):
+        updated_split_name=[]
+        if not '.nii' in pathlib.Path(exclusion_list[0]).name:
+            # read the file as a .txt
+            exclusion_list = np.array(pd.read_csv(os.path.abspath(exclusion_list[0]), header=None)).flatten()
+        for split in split_name:
+            exclude = False
+            for scan in exclusion_list:
+                if split in scan:
+                    exclude = True
+            if not exclude:
+                updated_split_name.append(split)
+    elif exclusion_list[0]=='none':
+        updated_split_name = split_name
+    else:
+        raise ValueError(f"The --exclusion_ids {exclusion_list} input had improper format. It must be the full path to a .txt or .nii file.")
+
+    if len(updated_split_name)==0:
+        raise ValueError(f"""
+            No scans are left after scan exclusion!
+            """)
+
+    return updated_split_name
+
+
+def filter_scan_inclusion(inclusion_list, split_name):
+    # the function will update the list of scan IDs in split_name to correspond to the inclusion list
+
+    # inclusion_list: the input provided by the user
+    # split_name: a list of all scan IDs that were found
+
+    import numpy as np
+    import pandas as pd
+    if os.path.isfile(os.path.abspath(inclusion_list[0])):
+        updated_split_name=[]
+        if '.nii' in pathlib.Path(inclusion_list[0]).name:
+            for scan in inclusion_list:
+                updated_split_name.append(find_split(scan, split_name))
+        else:
+            # read the file as a .txt
+            inclusion_list = np.array(pd.read_csv(os.path.abspath(inclusion_list[0]), header=None)).flatten()
+            for scan in inclusion_list:
+                updated_split_name.append(find_split(scan, split_name))
+    elif inclusion_list[0]=='all':
+        updated_split_name = split_name
+    else:
+        raise ValueError(f"The --inclusion_ids {inclusion_list} input had improper format. It must be the full path to a .txt or .nii file, or 'all' to keep all scans.")
+    return updated_split_name
+
+
+def find_split(scan, split_name):
+    for split in split_name:
+        if split in scan:
+            return split
+    raise ValueError(f"No previous file name matches {scan}")
+
+
 ######################
 #FUNCTIONS TO READ WORKFLOW GRAPH
 ######################
@@ -437,3 +502,53 @@ def fill_node_dict(d, key_l, e):
         return d
     else:
         return e
+
+
+######################
+#DEBUGGING
+######################
+
+def generate_token_data(tmppath, number_scans):
+    # this function generates fake scans at low resolution for quick testing and debugging
+
+    os.makedirs(tmppath+'/inputs', exist_ok=True)
+
+    if 'XDG_DATA_HOME' in os.environ.keys():
+        rabies_path = os.environ['XDG_DATA_HOME']+'/rabies'
+    else:
+        rabies_path = os.environ['HOME']+'/.local/share/rabies'
+
+    template = f"{rabies_path}/DSURQE_40micron_average.nii.gz"
+    mask = f"{rabies_path}/DSURQE_40micron_mask.nii.gz"
+
+    spacing = (float(1), float(1), float(1))  # resample to 1mmx1mmx1mm
+    resampled_template = resample_image_spacing(sitk.ReadImage(template), spacing)
+    # generate template masks
+    resampled_mask = resample_image_spacing(sitk.ReadImage(mask), spacing)
+    array = sitk.GetArrayFromImage(resampled_mask)
+    array[array < 1] = 0
+    array[array > 1] = 1
+    binarized = sitk.GetImageFromArray(array, isVector=False)
+    binarized.CopyInformation(resampled_mask)
+    sitk.WriteImage(binarized, tmppath+'/inputs/token_mask.nii.gz')
+    array[:, :, :6] = 0
+    binarized = sitk.GetImageFromArray(array, isVector=False)
+    binarized.CopyInformation(resampled_mask)
+    sitk.WriteImage(binarized, tmppath+'/inputs/token_mask_half.nii.gz')
+
+    # generate fake scans from the template
+    array = sitk.GetArrayFromImage(resampled_template)
+    array_4d = np.repeat(array[np.newaxis, :, :, :], 15, axis=0)
+
+    for i in range(number_scans):
+        # generate anatomical scan
+        sitk.WriteImage(resampled_template, tmppath+f'/inputs/sub-token{i+1}_T1w.nii.gz')
+        # generate functional scan
+        array_4d_ = array_4d + np.random.normal(0, array_4d.mean()
+                                                / 100, array_4d.shape)  # add gaussian noise
+        sitk.WriteImage(sitk.GetImageFromArray(array_4d_, isVector=False),
+                        tmppath+f'/inputs/sub-token{i+1}_bold.nii.gz')
+
+        # necessary to read matrix orientation properly at the analysis stage
+        sitk.WriteImage(copyInfo_4DImage(sitk.ReadImage(tmppath+f'/inputs/sub-token{i+1}_bold.nii.gz'), sitk.ReadImage(tmppath
+                        + f'/inputs/sub-token{i+1}_T1w.nii.gz'), sitk.ReadImage(tmppath+f'/inputs/sub-token{i+1}_bold.nii.gz')), tmppath+f'/inputs/sub-token{i+1}_bold.nii.gz')
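A minimal sketch of the matching semantics of the filter functions added to `rabies/utils.py` above, assuming this patch is installed. Note that `.nii` inputs must point to files that actually exist on disk (the `os.path.isfile` check), which is why a placeholder file is created here:

```python
import tempfile
from rabies.utils import filter_scan_inclusion, filter_scan_exclusion

split_name = ['sub-token1_bold', 'sub-token2_bold', 'sub-token3_bold']

# the defaults are pass-throughs
assert filter_scan_inclusion(['all'], split_name) == split_name
assert filter_scan_exclusion(['none'], split_name) == split_name

# scan IDs are matched as substrings of the provided path
tmpdir = tempfile.mkdtemp()
scan = f'{tmpdir}/sub-token2_bold.nii.gz'
open(scan, 'w').close()  # empty placeholder; only the name is matched
print(filter_scan_inclusion([scan], split_name))  # ['sub-token2_bold']
print(filter_scan_exclusion([scan], split_name))  # ['sub-token1_bold', 'sub-token3_bold']
```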
diff --git a/scripts/debug_workflow.py b/scripts/debug_workflow.py
index 87241e3..4cbc3dd 100644
--- a/scripts/debug_workflow.py
+++ b/scripts/debug_workflow.py
@@ -1,65 +1,28 @@
 #! /usr/bin/env python
 
-import SimpleITK as sitk
-import os
-import sys
-import numpy as np
-import tempfile
-import shutil
-from rabies.utils import resample_image_spacing, copyInfo_4DImage
-
-if len(sys.argv) == 2:
-    tmppath = sys.argv[1]
-else:
-    tmppath = tempfile.mkdtemp()
-
-os.makedirs(tmppath+'/inputs', exist_ok=True)
-
-if 'XDG_DATA_HOME' in os.environ.keys():
-    rabies_path = os.environ['XDG_DATA_HOME']+'/rabies'
-else:
-    rabies_path = os.environ['HOME']+'/.local/share/rabies'
-
-template = f"{rabies_path}/DSURQE_40micron_average.nii.gz"
-mask = f"{rabies_path}/DSURQE_40micron_mask.nii.gz"
-
-img = sitk.ReadImage(template)
-spacing = (float(1), float(1), float(1))  # resample to 1mmx1mmx1mm
-resampled = resample_image_spacing(sitk.ReadImage(template), spacing)
-array = sitk.GetArrayFromImage(resampled)
-array_4d = np.repeat(array[np.newaxis, :, :, :], 15, axis=0)
-array_4d += np.random.normal(0, array_4d.mean()
-                             / 100, array_4d.shape)  # add gaussian noise
-sitk.WriteImage(resampled, tmppath+'/inputs/sub-token_T1w.nii.gz')
-sitk.WriteImage(resampled, tmppath+'/inputs/sub-token2_T1w.nii.gz')
-sitk.WriteImage(resampled, tmppath+'/inputs/sub-token3_T1w.nii.gz')
-sitk.WriteImage(sitk.GetImageFromArray(array_4d, isVector=False),
-                tmppath+'/inputs/sub-token_bold.nii.gz')
-
-resampled = resample_image_spacing(sitk.ReadImage(mask), spacing)
-array = sitk.GetArrayFromImage(resampled)
-array[array < 1] = 0
-array[array > 1] = 1
-binarized = sitk.GetImageFromArray(array, isVector=False)
-binarized.CopyInformation(resampled)
-sitk.WriteImage(binarized, tmppath+'/inputs/token_mask.nii.gz')
-array[:, :, :6] = 0
-binarized = sitk.GetImageFromArray(array, isVector=False)
-binarized.CopyInformation(resampled)
-sitk.WriteImage(binarized, tmppath+'/inputs/token_mask_half.nii.gz')
-
+from rabies.utils import generate_token_data
+from rabies.run_main import execute_workflow
 
-sitk.WriteImage(copyInfo_4DImage(sitk.ReadImage(tmppath+'/inputs/sub-token_bold.nii.gz'), sitk.ReadImage(tmppath
-                + '/inputs/sub-token_T1w.nii.gz'), sitk.ReadImage(tmppath+'/inputs/sub-token_bold.nii.gz')), tmppath+'/inputs/sub-token_bold.nii.gz')
+import tempfile
+tmppath = tempfile.mkdtemp()
+
+#### increase the number of scans generated to 3 if running group analysis
+generate_token_data(tmppath, number_scans=3)
 
 output_folder = f'{tmppath}/outputs'
+
+#### SET THE DESIRED PREPROCESSING PARAMETERS HERE
 args = [
+    f'--exclusion_ids',f'{tmppath}/inputs/sub-token1_bold.nii.gz',
+    '-f',
     #'--debug',
     'preprocess', f'{tmppath}/inputs', output_folder,
-    '--anat_inho_cor_method','disable', '--bold_inho_cor_method', 'disable',
-    '--coreg_script', 'NULL', '--atlas_reg_script', 'NULL', '--data_type', 'int16', '--bold_only', '--fast_commonspace',
-    '--anat_template', f'{tmppath}/inputs/sub-token_T1w.nii.gz',
+    '--anat_inho_cor', 'method=disable,otsu_thresh=2,multiotsu=false',
+    '--bold_inho_cor', 'method=disable,otsu_thresh=2,multiotsu=false',
+    '--bold2anat_coreg', 'registration=no_reg,masking=false,brain_extraction=false',
+    '--commonspace_reg', 'masking=false,brain_extraction=false,fast_commonspace=true,template_registration=no_reg',
+    '--data_type', 'int16',
+    '--anat_template', f'{tmppath}/inputs/sub-token1_T1w.nii.gz',
     '--brain_mask', f'{tmppath}/inputs/token_mask.nii.gz',
     '--WM_mask', f'{tmppath}/inputs/token_mask.nii.gz',
     '--CSF_mask', f'{tmppath}/inputs/token_mask.nii.gz',
@@ -67,40 +30,27 @@
     '--labels', f'{tmppath}/inputs/token_mask.nii.gz',
     ]
 
-from rabies.parser import get_parser
-
-parser = get_parser()
-
-opts = parser.parse_args(args)
+execute_workflow(args=args)
 
-if not os.path.isdir(output_folder):
-    os.makedirs(output_folder)
-
-from rabies.run_main import prep_logging, preprocess, confound_correction, analysis
-log = prep_logging(opts, output_folder)
-
-# print complete CLI command
-args = 'CLI INPUTS: \n'
-for arg in vars(opts):
-    input = f'-> {arg} = {getattr(opts, arg)} \n'
-    args += input
-log.info(args)
 
-if opts.rabies_stage == 'preprocess':
-    workflow = preprocess(opts, log)
-elif opts.rabies_stage == 'confound_correction':
-    workflow = confound_correction(opts, log)
-elif opts.rabies_stage == 'analysis':
-    workflow = analysis(opts, log)
-else:
-    parser.print_help()
-workflow.base_dir = output_folder
+'''
+args = [
+    f'--exclusion_ids',f'{tmppath}/inputs/sub-token1_bold.nii.gz',f'{tmppath}/inputs/sub-token2_bold.nii.gz',
+    '-f',
+    'confound_correction', output_folder, output_folder,
+    '--nativespace_analysis',
+    ]
+
+execute_workflow(args=args)
 
-log.info(f'Running workflow with {opts.plugin} plugin.')
-# execute workflow, with plugin_args limiting the cluster load for parallel execution
-graph_out = workflow.run(plugin=opts.plugin, plugin_args={'max_jobs': 50, 'dont_resubmit_completed_jobs': True,
-                                                          'n_procs': opts.local_threads, 'qsub_args': f'-pe smp {str(opts.min_proc)}'})
+args = [
+    f'--exclusion_ids',f'{tmppath}/inputs/sub-token3_bold.nii.gz',
+    '-f',
+    'analysis', output_folder, output_folder,
+    '--data_diagnosis'
+    ]
+
+execute_workflow(args=args)
+'''
diff --git a/scripts/error_check_rabies.py b/scripts/error_check_rabies.py
index ad5e618..20ac22a 100755
--- a/scripts/error_check_rabies.py
+++ b/scripts/error_check_rabies.py
@@ -1,60 +1,21 @@
 #! /usr/bin/env python
 
-import SimpleITK as sitk
 import os
 import sys
-import numpy as np
 import tempfile
 import shutil
 import subprocess
-from rabies.utils import resample_image_spacing, copyInfo_4DImage
+from rabies.utils import generate_token_data
 
 if len(sys.argv) == 2:
     tmppath = sys.argv[1]
 else:
     tmppath = tempfile.mkdtemp()
 
-os.makedirs(tmppath+'/inputs', exist_ok=True)
-
-if 'XDG_DATA_HOME' in os.environ.keys():
-    rabies_path = os.environ['XDG_DATA_HOME']+'/rabies'
-else:
-    rabies_path = os.environ['HOME']+'/.local/share/rabies'
-
-template = f"{rabies_path}/DSURQE_40micron_average.nii.gz"
-mask = f"{rabies_path}/DSURQE_40micron_mask.nii.gz"
-
-img = sitk.ReadImage(template)
-spacing = (float(1), float(1), float(1))  # resample to 1mmx1mmx1mm
-resampled = resample_image_spacing(sitk.ReadImage(template), spacing)
-array = sitk.GetArrayFromImage(resampled)
-array_4d = np.repeat(array[np.newaxis, :, :, :], 15, axis=0)
-array_4d += np.random.normal(0, array_4d.mean()
-                             / 100, array_4d.shape)  # add gaussian noise
-sitk.WriteImage(resampled, tmppath+'/inputs/sub-token_T1w.nii.gz')
-sitk.WriteImage(resampled, tmppath+'/inputs/sub-token2_T1w.nii.gz')
-sitk.WriteImage(resampled, tmppath+'/inputs/sub-token3_T1w.nii.gz')
-sitk.WriteImage(sitk.GetImageFromArray(array_4d, isVector=False),
-                tmppath+'/inputs/sub-token_bold.nii.gz')
-
-resampled = resample_image_spacing(sitk.ReadImage(mask), spacing)
-array = sitk.GetArrayFromImage(resampled)
-array[array < 1] = 0
-array[array > 1] = 1
-binarized = sitk.GetImageFromArray(array, isVector=False)
-binarized.CopyInformation(resampled)
-sitk.WriteImage(binarized, tmppath+'/inputs/token_mask.nii.gz')
-array[:, :, :6] = 0
-binarized = sitk.GetImageFromArray(array, isVector=False)
-binarized.CopyInformation(resampled)
-sitk.WriteImage(binarized, tmppath+'/inputs/token_mask_half.nii.gz')
-
-
-sitk.WriteImage(copyInfo_4DImage(sitk.ReadImage(tmppath+'/inputs/sub-token_bold.nii.gz'), sitk.ReadImage(tmppath
-                + '/inputs/sub-token_T1w.nii.gz'), sitk.ReadImage(tmppath+'/inputs/sub-token_bold.nii.gz')), tmppath+'/inputs/sub-token_bold.nii.gz')
+generate_token_data(tmppath, number_scans=1)
 
 command = f"rabies --verbose 1 preprocess {tmppath}/inputs {tmppath}/outputs --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \
-    --anat_template {tmppath}/inputs/sub-token_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask.nii.gz --CSF_mask {tmppath}/inputs/token_mask.nii.gz --vascular_mask {tmppath}/inputs/token_mask.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \
+    --anat_template {tmppath}/inputs/sub-token1_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask.nii.gz --CSF_mask {tmppath}/inputs/token_mask.nii.gz --vascular_mask {tmppath}/inputs/token_mask.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \
     --bold2anat_coreg registration=no_reg,masking=false,brain_extraction=false --commonspace_reg masking=false,brain_extraction=false,fast_commonspace=true,template_registration=no_reg --data_type int16 --bold_only --detect_dummy \
     --tpattern seq-z --apply_STC --voxelwise_motion --isotropic_HMC"
 process = subprocess.run(
@@ -65,7 +26,7 @@
 shutil.rmtree(f'{tmppath}/outputs/')
 
 command = f"rabies --verbose 1 preprocess {tmppath}/inputs {tmppath}/outputs --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \
-    --anat_template {tmppath}/inputs/sub-token_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask.nii.gz --CSF_mask {tmppath}/inputs/token_mask.nii.gz --vascular_mask {tmppath}/inputs/token_mask.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \
+    --anat_template {tmppath}/inputs/sub-token1_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask.nii.gz --CSF_mask {tmppath}/inputs/token_mask.nii.gz --vascular_mask {tmppath}/inputs/token_mask.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \
     --bold2anat_coreg registration=no_reg,masking=true,brain_extraction=true --commonspace_reg masking=true,brain_extraction=true,fast_commonspace=true,template_registration=no_reg --data_type int16 \
     --HMC_option 0"
 process = subprocess.run(
@@ -81,19 +42,28 @@
     shell=True,
     )
 
-### Add subjects for the group analysis to run
-array_4d += np.random.normal(0, array_4d.mean()
-                             / 100, array_4d.shape)  # add gaussian noise
-sitk.WriteImage(sitk.GetImageFromArray(array_4d, isVector=False),
-                tmppath+'/inputs/sub-token2_bold.nii.gz')
-array_4d += np.random.normal(0, array_4d.mean()
-                             / 100, array_4d.shape)  # add gaussian noise
-sitk.WriteImage(sitk.GetImageFromArray(array_4d, isVector=False),
-                tmppath+'/inputs/sub-token3_bold.nii.gz')
+# rerun confound correction without the frame censoring that removed all scans
+os.remove(f'{tmppath}/outputs/rabies_confound_correction.pkl')
+command = f"rabies --verbose 1 confound_correction {tmppath}/outputs {tmppath}/outputs --nativespace_analysis"
+process = subprocess.run(
+    command,
+    check=True,
+    shell=True,
+    )
+
+command = f"rabies --verbose 1 analysis {tmppath}/outputs {tmppath}/outputs --data_diagnosis"
+process = subprocess.run(
+    command,
+    check=True,
+    shell=True,
+    )
+
+shutil.rmtree(f'{tmppath}/inputs/')
+generate_token_data(tmppath, number_scans=3)
 
 shutil.rmtree(f'{tmppath}/outputs/')
 command = f"rabies --verbose 1 preprocess {tmppath}/inputs {tmppath}/outputs --anat_inho_cor method=disable,otsu_thresh=2,multiotsu=false --bold_inho_cor method=disable,otsu_thresh=2,multiotsu=false \
-    --anat_template {tmppath}/inputs/sub-token_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask_half.nii.gz --CSF_mask {tmppath}/inputs/token_mask_half.nii.gz --vascular_mask {tmppath}/inputs/token_mask_half.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \
+    --anat_template {tmppath}/inputs/sub-token1_T1w.nii.gz --brain_mask {tmppath}/inputs/token_mask.nii.gz --WM_mask {tmppath}/inputs/token_mask_half.nii.gz --CSF_mask {tmppath}/inputs/token_mask_half.nii.gz --vascular_mask {tmppath}/inputs/token_mask_half.nii.gz --labels {tmppath}/inputs/token_mask.nii.gz \
     --bold2anat_coreg registration=no_reg,masking=false,brain_extraction=false --commonspace_reg masking=false,brain_extraction=false,fast_commonspace=true,template_registration=no_reg --data_type int16 \
     --HMC_option 0"
 process = subprocess.run(