From eb86d69fafd64aa778e0acdd47cdd48931945ad9 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 10:26:03 -0400 Subject: [PATCH 01/19] Draft pre-init ingression. --- qsirecon/cli/parser.py | 25 +++++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index ba40bbe..370a96c 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -442,7 +442,7 @@ def parse_args(args=None, namespace=None): f"total threads (--nthreads/--n_cpus={config.nipype.nprocs})" ) - bids_dir = config.execution.bids_dir + input_dir = config.execution.input_dir output_dir = config.execution.output_dir work_dir = config.execution.work_dir version = config.environment.version @@ -454,16 +454,16 @@ def parse_args(args=None, namespace=None): config.from_dict({}) # Ensure input and output folders are not the same - if output_dir == bids_dir: + if output_dir == input_dir: parser.error( "The selected output folder is the same as the input BIDS folder. " "Please modify the output path (suggestion: %s)." - % bids_dir + % input_dir / "derivatives" / ("qsirecon-%s" % version.split("+")[0]) ) - if bids_dir in work_dir.parents: + if input_dir in work_dir.parents: parser.error( "The selected working directory is a subdirectory of the input BIDS folder. " "Please modify the output path." @@ -475,6 +475,23 @@ def parse_args(args=None, namespace=None): config.execution.log_dir.mkdir(exist_ok=True, parents=True) work_dir.mkdir(exist_ok=True, parents=True) + # Run ingression if necessary + if config.execution.input_type in ('ukbb', 'hcpya'): + from ingress2qsirecon.utils.workflows import ingress2qsirecon + + config.execution.bids_dir = "blah" + + wf = ingress2qsirecon( + config.execution.input_dir, + config.execution.bids_dir, + config.execution.input_type, + ) + # Configure the nipype workflow + wf.config["execution"]["crashdump_dir"] = str(config.execution.log_dir) + wf.run() + else: + config.execution.bids_dir = config.execution.input_dir + # Force initialization of the BIDSLayout config.execution.init() all_subjects = config.execution.layout.get_subjects() From 21702d1006ecfdd807880260ad296d705d0efc7e Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 11:36:35 -0400 Subject: [PATCH 02/19] Add input_dir. --- qsirecon/cli/parser.py | 9 +++++++-- qsirecon/config.py | 3 +++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index 370a96c..c48fd48 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -119,8 +119,13 @@ def _bids_filter(value, parser): "bids_dir", action="store", type=PathExists, - help="The root folder of a BIDS valid dataset (sub-XXXXX folders should " - "be found at the top level in this folder).", + dest="input_dir", + help=( + "The root folder of a BIDS valid dataset " + "(sub-XXXXX folders should be found at the top level in this folder). " + "If the dataset is not BIDS-valid, " + "then a BIDS-compliant version will be created based on the --input-type value." 
+ ), ) parser.add_argument( "output_dir", diff --git a/qsirecon/config.py b/qsirecon/config.py index 63d9bcb..cf97aeb 100644 --- a/qsirecon/config.py +++ b/qsirecon/config.py @@ -383,6 +383,9 @@ class execution(_Config): bids_dir = None """An existing path to the dataset, which must be BIDS-compliant.""" + input_dir = None + """An existing path to the input data, which may not be BIDS-compliant + (in which case a BIDS-compliant version will be created and stored as bids_dir).""" derivatives = {} """Path(s) to search for pre-computed derivatives""" bids_database_dir = None From fb6bacfc19b2604ee7a86b724124f8e1c06f6d56 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 11:38:04 -0400 Subject: [PATCH 03/19] Update pyproject.toml --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index bfe6289..ff5077e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ "filelock", "fury", "indexed_gzip <= 1.8.7", + "Ingress2QSIRecon == 0.1.0", "jinja2 < 3.1", "matplotlib", "networkx ~= 2.8.8", From bca88322ccaffb4cb6380890dc43a60b01c86230 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 11:40:59 -0400 Subject: [PATCH 04/19] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ff5077e..2d59647 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dependencies = [ "jinja2 < 3.1", "matplotlib", "networkx ~= 2.8.8", - "nibabel <= 5.2.0", + "nibabel <= 6.0.0", "nilearn == 0.10.1", "nipype == 1.8.6", "nireports ~= 24.0.2", From 60e4ceaf8f5be96779887de925ce56cedcf15fb4 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 11:42:46 -0400 Subject: [PATCH 05/19] Update parser.py --- qsirecon/cli/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index c48fd48..e3045ef 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -481,7 +481,7 @@ def parse_args(args=None, namespace=None): work_dir.mkdir(exist_ok=True, parents=True) # Run ingression if necessary - if config.execution.input_type in ('ukbb', 'hcpya'): + if config.execution.input_type in ("ukbb", "hcpya"): from ingress2qsirecon.utils.workflows import ingress2qsirecon config.execution.bids_dir = "blah" From 747579a97ffa1b5917fc78b1a0e9b8c9f56996c0 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 11:50:45 -0400 Subject: [PATCH 06/19] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 2d59647..62a726f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ "matplotlib", "networkx ~= 2.8.8", "nibabel <= 6.0.0", - "nilearn == 0.10.1", + "nilearn", "nipype == 1.8.6", "nireports ~= 24.0.2", "niworkflows >=1.9,<= 1.10", From 742ff2b3e6e43b6e28f126a537761533455f12f8 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 12:05:00 -0400 Subject: [PATCH 07/19] Update parser.py --- qsirecon/cli/parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index e3045ef..54383e9 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -116,10 +116,10 @@ def _bids_filter(value, parser): # required, positional arguments # IMPORTANT: they must go directly with the parser object parser.add_argument( - "bids_dir", + "input_dir", action="store", + 
metavar="bids_dir", type=PathExists, - dest="input_dir", help=( "The root folder of a BIDS valid dataset " "(sub-XXXXX folders should be found at the top level in this folder). " From 02e2dfe236f4bd5077cf3d1f510874bd013e30f1 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 12:15:57 -0400 Subject: [PATCH 08/19] Update parser.py --- qsirecon/cli/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index 54383e9..e071fd3 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -456,7 +456,7 @@ def parse_args(args=None, namespace=None): # sections (we used `init=False` above). # This must be done after cleaning the work directory, or we could delete an # open SQLite database - config.from_dict({}) + config.from_dict({}, ignore=["layout"]) # Ensure input and output folders are not the same if output_dir == input_dir: From a316f94159c00fff5f2e13c1bfd594b5571a823f Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 13:20:46 -0400 Subject: [PATCH 09/19] Update parser.py --- qsirecon/cli/parser.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index e071fd3..b59a29a 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -452,12 +452,6 @@ def parse_args(args=None, namespace=None): work_dir = config.execution.work_dir version = config.environment.version - # Update the config with an empty dict to trigger initialization of all config - # sections (we used `init=False` above). - # This must be done after cleaning the work directory, or we could delete an - # open SQLite database - config.from_dict({}, ignore=["layout"]) - # Ensure input and output folders are not the same if output_dir == input_dir: parser.error( @@ -475,16 +469,16 @@ def parse_args(args=None, namespace=None): ) # Setup directories - config.execution.log_dir = config.execution.output_dir / "logs" + log_dir = output_dir / "logs" # Check and create output and working directories - config.execution.log_dir.mkdir(exist_ok=True, parents=True) + log_dir.mkdir(exist_ok=True, parents=True) work_dir.mkdir(exist_ok=True, parents=True) # Run ingression if necessary if config.execution.input_type in ("ukbb", "hcpya"): from ingress2qsirecon.utils.workflows import ingress2qsirecon - config.execution.bids_dir = "blah" + config.execution.bids_dir = work_dir / "bids" wf = ingress2qsirecon( config.execution.input_dir, @@ -492,11 +486,18 @@ def parse_args(args=None, namespace=None): config.execution.input_type, ) # Configure the nipype workflow - wf.config["execution"]["crashdump_dir"] = str(config.execution.log_dir) + wf.config["execution"]["crashdump_dir"] = str(log_dir) wf.run() else: config.execution.bids_dir = config.execution.input_dir + # Update the config with an empty dict to trigger initialization of all config + # sections (we used `init=False` above). 
+ # This must be done after cleaning the work directory, or we could delete an + # open SQLite database + config.from_dict({}) + config.execution.log_dir = log_dir + # Force initialization of the BIDSLayout config.execution.init() all_subjects = config.execution.layout.get_subjects() From 61d4f07d814ecd9405bcd70fcc8e2d692c2d7581 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 30 Aug 2024 13:35:08 -0400 Subject: [PATCH 10/19] Update parser.py --- qsirecon/cli/parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index b59a29a..68333ad 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -475,7 +475,7 @@ def parse_args(args=None, namespace=None): work_dir.mkdir(exist_ok=True, parents=True) # Run ingression if necessary - if config.execution.input_type in ("ukbb", "hcpya"): + if config.workflow.input_type in ("ukbb", "hcpya"): from ingress2qsirecon.utils.workflows import ingress2qsirecon config.execution.bids_dir = work_dir / "bids" @@ -483,7 +483,7 @@ def parse_args(args=None, namespace=None): wf = ingress2qsirecon( config.execution.input_dir, config.execution.bids_dir, - config.execution.input_type, + config.workflow.input_type, ) # Configure the nipype workflow wf.config["execution"]["crashdump_dir"] = str(log_dir) From 44678314d212bc7522b43f7cbd20d3f4ba896413 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Thu, 12 Sep 2024 09:40:27 -0400 Subject: [PATCH 11/19] add external ingression --- pyproject.toml | 2 +- qsirecon/cli/parser.py | 32 +++++++-- qsirecon/interfaces/anatomical.py | 32 --------- qsirecon/interfaces/ingress.py | 59 +--------------- qsirecon/workflows/base.py | 98 ++++++++------------------ qsirecon/workflows/recon/anatomical.py | 65 +---------------- 6 files changed, 60 insertions(+), 228 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 62a726f..10a3cee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ dependencies = [ "filelock", "fury", "indexed_gzip <= 1.8.7", - "Ingress2QSIRecon == 0.1.0", + "Ingress2QSIRecon == 0.1.3", "jinja2 < 3.1", "matplotlib", "networkx ~= 2.8.8", diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index 68333ad..f57dfee 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -475,16 +475,36 @@ def parse_args(args=None, namespace=None): work_dir.mkdir(exist_ok=True, parents=True) # Run ingression if necessary - if config.workflow.input_type in ("ukbb", "hcpya"): - from ingress2qsirecon.utils.workflows import ingress2qsirecon + if config.workflow.input_type in ("hcpya", "ukb"): + import os.path as op + import shutil + import ingress2qsirecon + from ingress2qsirecon.utils.functions import create_layout + from ingress2qsirecon.utils.workflows import create_ingress2qsirecon_wf + + # Fake BIDS directory to be created config.execution.bids_dir = work_dir / "bids" - wf = ingress2qsirecon( - config.execution.input_dir, - config.execution.bids_dir, - config.workflow.input_type, + # Make fake BIDS files + ingress2recon_dir = op.dirname(ingress2qsirecon.__file__) + if not op.exists(op.join(config.execution.bids_dir, "dataset_description.json")): + shutil.copytree(op.join(ingress2recon_dir, "data", "bids_scaffold"), + config.execution.bids_dir, dirs_exist_ok=True) + + if config.execution.participant_label is None: + participants_ingression = [] + else: + participants_ingression = list(config.execution.participant_label) + layouts = create_layout(config.execution.input_dir, config.execution.bids_dir, + 
config.workflow.input_type, participants_ingression) + + # Create the ingression workflow + wf = create_ingress2qsirecon_wf( + layouts, + base_dir=work_dir, ) + # Configure the nipype workflow wf.config["execution"]["crashdump_dir"] = str(log_dir) wf.run() diff --git a/qsirecon/interfaces/anatomical.py b/qsirecon/interfaces/anatomical.py index 5a6607c..7582619 100644 --- a/qsirecon/interfaces/anatomical.py +++ b/qsirecon/interfaces/anatomical.py @@ -26,9 +26,6 @@ ) from nipype.utils.filemanip import fname_presuffix -from ..utils.ingress import ukb_dirname_to_bids -from .images import to_lps - LOGGER = logging.getLogger("nipype.interface") @@ -151,35 +148,6 @@ def _get_if_exists(self, name, pattern, excludes=None): self._results[name] = files[0] -class UKBAnatomicalIngressInputSpec(QSIPrepAnatomicalIngressInputSpec): - recon_input_dir = traits.Directory( - exists=True, mandatory=True, help="directory containing a single subject's results" - ) - - -class UKBAnatomicalIngress(QSIPrepAnatomicalIngress): - input_spec = UKBAnatomicalIngressInputSpec - - def _run_interface(self, runtime): - # Load the Bias-corrected brain and brain mask - input_path = Path(self.inputs.recon_input_dir) - bids_name = ukb_dirname_to_bids(self.inputs.recon_input_dir) - - ukb_brain = input_path / "T1" / "T1_unbiased_brain.nii.gz" - ukb_brain_mask = input_path / "T1" / "T1_brain_mask.nii.gz" - - conformed_t1w_file = str(Path(runtime.cwd) / (bids_name + "_desc-preproc_T1w.nii.gz")) - conformed_mask_file = str(Path(runtime.cwd) / (bids_name + "_desc-brain_mask.nii.gz")) - - to_lps(nb.load(ukb_brain)).to_filename(conformed_t1w_file) - to_lps(nb.load(ukb_brain_mask)).to_filename(conformed_mask_file) - - self._results["t1_preproc"] = conformed_t1w_file - self._results["t1_brain_mask"] = conformed_mask_file - - return runtime - - """ The spherical harmonic coefficients are stored as follows. First, since the diff --git a/qsirecon/interfaces/ingress.py b/qsirecon/interfaces/ingress.py index 3c00295..75ad422 100644 --- a/qsirecon/interfaces/ingress.py +++ b/qsirecon/interfaces/ingress.py @@ -119,61 +119,4 @@ def _get_qc_filename(self, out_root, params, desc, suffix): used_keys = ["subject_id", "session_id", "acq_id", "dir_id", "run_id"] fname = "_".join([params[key] for key in used_keys if params[key]]) return out_root + "/" + fname + "_desc-%s_dwi.%s" % (desc, suffix) - - -class _UKBioBankDWIIngressInputSpec(QSIPrepDWIIngressInputSpec): - dwi_file = File(exists=False, help="The name of what a BIDS dwi file may have been") - data_dir = traits.Directory( - exists=True, help="The UKB data directory for a subject. 
Must contain DTI/ and T1/" - ) - - -class UKBioBankDWIIngress(SimpleInterface): - input_spec = _UKBioBankDWIIngressInputSpec - output_spec = QSIPrepDWIIngressOutputSpec - - def _run_interface(self, runtime): - runpath = Path(runtime.cwd) - - # The UKB input files - in_dir = Path(self.inputs.data_dir) - dwi_dir = in_dir / "DTI" / "dMRI" / "dMRI" - ukb_bval_file = dwi_dir / "bvals" - ukb_bvec_file = dwi_dir / "bvecs" # These are the same as eddy rotated - ukb_dwi_file = dwi_dir / "data_ud.nii.gz" - ukb_dwiref_file = dwi_dir / "dti_FA.nii.gz" - - # The bids_name is what the images will be renamed to - bids_name = Path(self.inputs.dwi_file).name.replace(".nii.gz", "") - dwi_file = str(runpath / (bids_name + ".nii.gz")) - bval_file = str(runpath / (bids_name + ".bval")) - bvec_file = str(runpath / (bids_name + ".bvec")) - b_file = str(runpath / (bids_name + ".b")) - btable_file = str(runpath / (bids_name + "btable.txt")) - dwiref_file = str(runpath / (bids_name.replace("_dwi", "_dwiref") + ".nii.gz")) - - dwi_conform = ConformDwi( - dwi_file=str(ukb_dwi_file), bval_file=str(ukb_bval_file), bvec_file=str(ukb_bvec_file) - ) - - result = dwi_conform.run() - Path(result.outputs.dwi_file).rename(dwi_file) - Path(result.outputs.bvec_file).rename(bvec_file) - shutil.copyfile(result.outputs.bval_file, bval_file) - # Reorient the dwi file to LPS+ - self._results["dwi_file"] = dwi_file - self._results["bvec_file"] = bvec_file - self._results["bval_file"] = bval_file - - # Create a btable_txt file for DSI Studio - btable_from_bvals_bvecs(bval_file, bvec_file, btable_file) - self._results["btable_file"] = btable_file - - # Create a mrtrix .b file - _convert_fsl_to_mrtrix(bval_file, bvec_file, b_file) - self._results["b_file"] = b_file - - # Create a dwi ref file - to_lps(nb.load(ukb_dwiref_file)).to_filename(dwiref_file) - self._results["dwi_ref"] = dwiref_file - return runtime + \ No newline at end of file diff --git a/qsirecon/workflows/base.py b/qsirecon/workflows/base.py index f9ecc1f..5150ccb 100644 --- a/qsirecon/workflows/base.py +++ b/qsirecon/workflows/base.py @@ -32,23 +32,12 @@ def init_qsirecon_wf(): qsirecon_wf = Workflow(name=f"qsirecon_{ver.major}_{ver.minor}_wf") qsirecon_wf.base_dir = config.execution.work_dir - if config.workflow.input_type not in ("qsiprep", "ukb"): + if config.workflow.input_type not in ("qsiprep", "hcpya", "ukb"): raise NotImplementedError( f"{config.workflow.input_type} is not supported as recon-input yet." 
) - if config.workflow.input_type == "qsiprep": - # This should work for --recon-input as long as the same dataset is in bids_dir - # or if the call is doing preproc+recon - to_recon_list = config.execution.participant_label - elif config.workflow.input_type == "ukb": - from ..utils.ingress import collect_ukb_participants, create_ukb_layout - - # The ukb input will always be specified as the bids input - we can't preproc it first - ukb_layout = create_ukb_layout(config.execution.bids_dir) - to_recon_list = collect_ukb_participants( - ukb_layout, participant_label=config.execution.participant_label - ) + to_recon_list = config.execution.participant_label for subject_id in to_recon_list: single_subject_wf = init_single_subject_recon_wf(subject_id=subject_id) @@ -86,7 +75,7 @@ def init_single_subject_recon_wf(subject_id): Single subject label """ from ..interfaces.bids import DerivativesDataSink - from ..interfaces.ingress import QSIPrepDWIIngress, UKBioBankDWIIngress + from ..interfaces.ingress import QSIPrepDWIIngress from ..interfaces.interchange import ( ReconWorkflowInputs, anatomical_workflow_outputs, @@ -138,17 +127,16 @@ def init_single_subject_recon_wf(subject_id): # This is here because qsiprep currently only makes one anatomical result per subject # regardless of sessions. So process it on its - if config.workflow.input_type == "qsiprep": - anat_ingress_node, available_anatomical_data = init_highres_recon_anatomical_wf( - subject_id=subject_id, - extras_to_make=spec.get("anatomical", []), - needs_t1w_transform=needs_t1w_transform, - ) + anat_ingress_node, available_anatomical_data = init_highres_recon_anatomical_wf( + subject_id=subject_id, + extras_to_make=spec.get("anatomical", []), + needs_t1w_transform=needs_t1w_transform, + ) - # Connect the anatomical-only inputs. NOTE this is not to the inputnode! - config.loggers.workflow.info( - "Anatomical (T1w) available for recon: %s", available_anatomical_data - ) + # Connect the anatomical-only inputs. NOTE this is not to the inputnode! + config.loggers.workflow.info( + "Anatomical (T1w) available for recon: %s", available_anatomical_data + ) aggregate_anatomical_nodes = pe.Node( niu.Merge(len(dwi_recon_inputs)), @@ -168,28 +156,11 @@ def init_single_subject_recon_wf(subject_id): wf_name = _get_wf_name(dwi_file) # Get the preprocessed DWI and all the related preprocessed images - if config.workflow.input_type == "qsiprep": - dwi_ingress_nodes[dwi_file] = pe.Node( - QSIPrepDWIIngress(dwi_file=dwi_file), - name=f"{wf_name}_ingressed_dwi_data", - ) - anat_ingress_nodes[dwi_file] = anat_ingress_node - - elif config.workflow.input_type == "ukb": - dwi_ingress_nodes[dwi_file] = pe.Node( - UKBioBankDWIIngress(dwi_file=dwi_file, data_dir=str(dwi_input["path"].absolute())), - name=f"{wf_name}_ingressed_ukb_dwi_data", - ) - anat_ingress_nodes[dwi_file], available_anatomical_data = ( - init_highres_recon_anatomical_wf( - subject_id=subject_id, - recon_input_dir=dwi_input["path"], - extras_to_make=spec.get("anatomical", []), - pipeline_source="ukb", - needs_t1w_transform=needs_t1w_transform, - name=f"{wf_name}_ingressed_ukb_anat_data", - ) - ) + dwi_ingress_nodes[dwi_file] = pe.Node( + QSIPrepDWIIngress(dwi_file=dwi_file), + name=f"{wf_name}_ingressed_dwi_data", + ) + anat_ingress_nodes[dwi_file] = anat_ingress_node # Aggregate the anatomical data from all the dwi files workflow.connect([ @@ -404,26 +375,19 @@ def _get_iterable_dwi_inputs(subject_id): the other files needed. 
""" - from ..utils.ingress import create_ukb_layout dwi_dir = config.execution.bids_dir - if config.workflow.input_type == "qsiprep": - if not (dwi_dir / f"sub-{subject_id}").exists(): - raise Exception(f"Unable to find subject directory in {config.execution.bids_dir}") - - layout = BIDSLayout(dwi_dir, validate=False, absolute_paths=True) - # Get all the output files that are in this space - dwi_files = [ - f.path - for f in layout.get( - suffix="dwi", subject=subject_id, absolute_paths=True, extension=["nii", "nii.gz"] - ) - if "space-T1w" in f.filename - ] - config.loggers.workflow.info("found %s in %s", dwi_files, dwi_dir) - return [{"bids_dwi_file": dwi_file} for dwi_file in dwi_files] - - if config.workflow.input_type == "ukb": - return create_ukb_layout(ukb_dir=config.execution.bids_dir, participant_label=subject_id) - - raise Exception("Unknown pipeline " + config.workflow.input_type) + if not (dwi_dir / f"sub-{subject_id}").exists(): + raise Exception(f"Unable to find subject directory in {config.execution.bids_dir}") + + layout = BIDSLayout(dwi_dir, validate=False, absolute_paths=True) + # Get all the output files that are in this space + dwi_files = [ + f.path + for f in layout.get( + suffix="dwi", subject=subject_id, absolute_paths=True, extension=["nii", "nii.gz"] + ) + if "space-T1w" in f.filename + ] + config.loggers.workflow.info("found %s in %s", dwi_files, dwi_dir) + return [{"bids_dwi_file": dwi_file} for dwi_file in dwi_files] diff --git a/qsirecon/workflows/recon/anatomical.py b/qsirecon/workflows/recon/anatomical.py index a2d06d6..a6b7a54 100644 --- a/qsirecon/workflows/recon/anatomical.py +++ b/qsirecon/workflows/recon/anatomical.py @@ -17,7 +17,6 @@ from ...interfaces.anatomical import ( GetTemplate, QSIPrepAnatomicalIngress, - UKBAnatomicalIngress, VoxelSizeChooser, ) from ...interfaces.ants import ConvertTransformFile @@ -46,8 +45,6 @@ "surf/rh.pial", ] -UKB_REQUIREMENTS = ["T1/T1_brain.nii.gz", "T1/T1_brain_mask.nii.gz"] - # Files that must exist if QSIRecon ran the anatomical workflow QSIRECON_ANAT_REQUIREMENTS = [ "sub-{subject_id}/anat/sub-{subject_id}_desc-brain_mask.nii.gz", @@ -82,15 +79,9 @@ def init_highres_recon_anatomical_wf( # are present. The anat_ingress_node is a nipype node that ensures that qsiprep-style # anatomical data is available. In the case where ``pipeline_source`` is not "qsiprep", # the data is converted in this node to be qsiprep-like. - pipeline_source = config.workflow.input_type freesurfer_dir = config.execution.freesurfer_input qsirecon_suffix = "" - if pipeline_source == "qsiprep": - anat_ingress_node, status = gather_qsiprep_anatomical_data(subject_id) - elif pipeline_source == "ukb": - anat_ingress_node, status = gather_ukb_anatomical_data(subject_id) - else: - raise Exception(f"Unknown pipeline source '{pipeline_source}'") + anat_ingress_node, status = gather_qsiprep_anatomical_data(subject_id) anat_ingress_node.inputs.infant_mode = config.workflow.infant if needs_t1w_transform and not status["has_qsiprep_t1w_transforms"]: raise Exception("Cannot compute to-template") @@ -217,44 +208,6 @@ def init_highres_recon_anatomical_wf( return workflow, status -def gather_ukb_anatomical_data(subject_id): - """ - Check a UKB directory for the necessary files for recon workflows. 
- - Parameters - ---------- - subject_id : str - List of subject labels - - """ - status = { - "has_qsiprep_5tt_hsvs": False, - "has_freesurfer_5tt_hsvs": False, - "has_freesurfer": False, - } - recon_input_dir = config.execution.bids_dir - - # Check to see if we have a T1w preprocessed by QSIRecon - missing_ukb_anats = check_ukb_anatomical_outputs(recon_input_dir) - has_t1w = not missing_ukb_anats - status["has_qsiprep_t1w"] = has_t1w - if missing_ukb_anats: - config.loggers.workflow.info(f"Missing T1w from UKB session: {recon_input_dir}") - else: - config.loggers.workflow.info("Found usable UKB-preprocessed T1w image and mask.") - anat_ingress = pe.Node( - UKBAnatomicalIngress(subject_id=subject_id, recon_input_dir=recon_input_dir), - name="ukb_anat_ingress", - ) - - # I couldn't figure out how to convert UKB transforms to ants. So - # they're not available for recon workflows for now - status["has_qsiprep_t1w_transforms"] = False - config.loggers.workflow.info("QSIRecon can't read FNIRT transforms from UKB at this time.") - - return anat_ingress, status - - def gather_qsiprep_anatomical_data(subject_id): """ Gathers the anatomical data from a QSIPrep input and finds which files are available. @@ -357,22 +310,6 @@ def check_qsiprep_anatomical_outputs(recon_input_dir, subject_id, anat_type): return missing -def check_ukb_anatomical_outputs(recon_input_dir): - """Check for required files under a UKB session directory. - - Parameters: - - recon_input_dir: pathlike - Path to a UKB subject directory (eg 1234567_2_0) - """ - missing = [] - recon_input_path = Path(recon_input_dir) - for requirement in UKB_REQUIREMENTS: - if not (recon_input_path / requirement).exists(): - missing.append(str(requirement)) - return missing - - def init_register_fs_to_qsiprep_wf( use_qsiprep_reference_mask=False, name="register_fs_to_qsiprep_wf" ): From 08478ea82d40379287076f2e983f3941577b3e38 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Thu, 12 Sep 2024 09:43:53 -0400 Subject: [PATCH 12/19] linting --- qsirecon/cli/parser.py | 15 +++++++++++---- qsirecon/interfaces/ingress.py | 7 ------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index f57dfee..d349cd4 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -489,15 +489,22 @@ def parse_args(args=None, namespace=None): # Make fake BIDS files ingress2recon_dir = op.dirname(ingress2qsirecon.__file__) if not op.exists(op.join(config.execution.bids_dir, "dataset_description.json")): - shutil.copytree(op.join(ingress2recon_dir, "data", "bids_scaffold"), - config.execution.bids_dir, dirs_exist_ok=True) + shutil.copytree( + op.join(ingress2recon_dir, "data", "bids_scaffold"), + config.execution.bids_dir, + dirs_exist_ok=True, + ) if config.execution.participant_label is None: participants_ingression = [] else: participants_ingression = list(config.execution.participant_label) - layouts = create_layout(config.execution.input_dir, config.execution.bids_dir, - config.workflow.input_type, participants_ingression) + layouts = create_layout( + config.execution.input_dir, + config.execution.bids_dir, + config.workflow.input_type, + participants_ingression, + ) # Create the ingression workflow wf = create_ingress2qsirecon_wf( diff --git a/qsirecon/interfaces/ingress.py b/qsirecon/interfaces/ingress.py index 75ad422..68bea17 100644 --- a/qsirecon/interfaces/ingress.py +++ b/qsirecon/interfaces/ingress.py @@ -20,11 +20,8 @@ """ import os.path as op -import shutil from glob import glob -from 
pathlib import Path -import nibabel as nb from nipype import logging from nipype.interfaces.base import ( BaseInterfaceInputSpec, @@ -36,9 +33,6 @@ from nipype.utils.filemanip import split_filename from .bids import get_bids_params -from .dsi_studio import btable_from_bvals_bvecs -from .images import ConformDwi, to_lps -from .mrtrix import _convert_fsl_to_mrtrix LOGGER = logging.getLogger("nipype.interface") @@ -119,4 +113,3 @@ def _get_qc_filename(self, out_root, params, desc, suffix): used_keys = ["subject_id", "session_id", "acq_id", "dir_id", "run_id"] fname = "_".join([params[key] for key in used_keys if params[key]]) return out_root + "/" + fname + "_desc-%s_dwi.%s" % (desc, suffix) - \ No newline at end of file From b6305b9f294f7d01ea90035c476ffd5fbfe37903 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Thu, 12 Sep 2024 09:45:30 -0400 Subject: [PATCH 13/19] linting: the sequel --- qsirecon/interfaces/anatomical.py | 1 - 1 file changed, 1 deletion(-) diff --git a/qsirecon/interfaces/anatomical.py b/qsirecon/interfaces/anatomical.py index 7582619..4a4237b 100644 --- a/qsirecon/interfaces/anatomical.py +++ b/qsirecon/interfaces/anatomical.py @@ -11,7 +11,6 @@ import os.path as op from glob import glob -from pathlib import Path import nibabel as nb import numpy as np From 897a78035ecf60d4bdf8058f132b853e2150880b Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Thu, 12 Sep 2024 10:11:01 -0400 Subject: [PATCH 14/19] update qsirecon_build --- .circleci/config.yml | 2 +- Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 389b92a..e0b72e2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -4,7 +4,7 @@ orbs: .dockersetup: &dockersetup docker: - - image: pennlinc/qsirecon_build:24.8.3 + - image: pennlinc/qsirecon_build:24.9.0 working_directory: /src/qsirecon runinstall: &runinstall diff --git a/Dockerfile b/Dockerfile index d70c4fe..41c814c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,7 +5,7 @@ RUN pip install build RUN apt-get update && \ apt-get install -y --no-install-recommends git -FROM pennlinc/qsirecon_build:24.8.3 +FROM pennlinc/qsirecon_build:24.9.0 # Install qsirecon COPY . 
/src/qsirecon From 0df29334a525fbb49f574d65a83da0b3922d24df Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Fri, 13 Sep 2024 11:44:06 -0400 Subject: [PATCH 15/19] use acres for loading data --- pyproject.toml | 2 +- qsirecon/cli/parser.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 10a3cee..17f609f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ dependencies = [ "filelock", "fury", "indexed_gzip <= 1.8.7", - "Ingress2QSIRecon == 0.1.3", + "Ingress2QSIRecon == 0.1.4", "jinja2 < 3.1", "matplotlib", "networkx ~= 2.8.8", diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index d349cd4..f80e462 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -479,7 +479,7 @@ def parse_args(args=None, namespace=None): import os.path as op import shutil - import ingress2qsirecon + from ingress2qsirecon.data import load_resource from ingress2qsirecon.utils.functions import create_layout from ingress2qsirecon.utils.workflows import create_ingress2qsirecon_wf @@ -487,10 +487,10 @@ def parse_args(args=None, namespace=None): config.execution.bids_dir = work_dir / "bids" # Make fake BIDS files - ingress2recon_dir = op.dirname(ingress2qsirecon.__file__) + bids_scaffold = str(load_resource("bids_scaffold/")) if not op.exists(op.join(config.execution.bids_dir, "dataset_description.json")): shutil.copytree( - op.join(ingress2recon_dir, "data", "bids_scaffold"), + bids_scaffold, config.execution.bids_dir, dirs_exist_ok=True, ) From 297c1ebec20e580fb387114dd22f8c684ce75a7d Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Thu, 19 Sep 2024 14:09:18 -0400 Subject: [PATCH 16/19] update i2q version --- docs/input_data.rst | 5 ++++- pyproject.toml | 2 +- qsirecon/cli/parser.py | 10 +++++----- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/docs/input_data.rst b/docs/input_data.rst index 998fca4..1c4ab78 100644 --- a/docs/input_data.rst +++ b/docs/input_data.rst @@ -59,7 +59,10 @@ HCP Young Adult Preprocessed Data ================================= To use minimally preprocessed dMRI data from HCP-YA specify ``--input-type hcpya``. -The included FNIRT transforms are usable directly. NOTE: this does not work yet. +The included FNIRT transforms are usable directly. Please note that if you have the +HCPYA dataset from datalad (https://github.com/datalad-datasets/human-connectome-project-openaccess) +then you should ``datalad get`` relevant subject data before running QSIRecon, +and be mindful about how you mount the directory in Docker/Apptainer. .. _anat_reqs: diff --git a/pyproject.toml b/pyproject.toml index 17f609f..cfb87c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ dependencies = [ "filelock", "fury", "indexed_gzip <= 1.8.7", - "Ingress2QSIRecon == 0.1.4", + "Ingress2QSIRecon == 0.2.0", "jinja2 < 3.1", "matplotlib", "networkx ~= 2.8.8", diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index f80e462..f575666 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -118,11 +118,11 @@ def _bids_filter(value, parser): parser.add_argument( "input_dir", action="store", - metavar="bids_dir", + metavar="input_dir", type=PathExists, help=( - "The root folder of a BIDS valid dataset " - "(sub-XXXXX folders should be found at the top level in this folder). " + "The root folder of the input dataset " + "(subject-level folders should be found at the top level in this folder). 
" "If the dataset is not BIDS-valid, " "then a BIDS-compliant version will be created based on the --input-type value." ), @@ -299,7 +299,7 @@ def _bids_filter(value, parser): default="qsiprep", choices=["qsiprep", "ukb", "hcpya"], help=( - "Specify which pipeline was used to create the data specified as the bids_dir." + "Specify which pipeline was used to create the data specified as the input_dir." "Not necessary to specify if the data was processed by QSIPrep. " "Other options include " '"ukb" for data processed with the UK BioBank minimal preprocessing pipeline and ' @@ -487,7 +487,7 @@ def parse_args(args=None, namespace=None): config.execution.bids_dir = work_dir / "bids" # Make fake BIDS files - bids_scaffold = str(load_resource("bids_scaffold/")) + bids_scaffold = str(load_resource("bids_scaffold")) if not op.exists(op.join(config.execution.bids_dir, "dataset_description.json")): shutil.copytree( bids_scaffold, From 04491afd155472aa38d19128d14c4bea01778274 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Thu, 19 Sep 2024 16:21:27 -0400 Subject: [PATCH 17/19] update i2q for acres dependency --- docs/input_data.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/input_data.rst b/docs/input_data.rst index 1c4ab78..0ef7622 100644 --- a/docs/input_data.rst +++ b/docs/input_data.rst @@ -59,7 +59,7 @@ HCP Young Adult Preprocessed Data ================================= To use minimally preprocessed dMRI data from HCP-YA specify ``--input-type hcpya``. -The included FNIRT transforms are usable directly. Please note that if you have the +Note that the transforms to/from MNI space are not able to be used at this time. Please note that if you have the HCPYA dataset from datalad (https://github.com/datalad-datasets/human-connectome-project-openaccess) then you should ``datalad get`` relevant subject data before running QSIRecon, and be mindful about how you mount the directory in Docker/Apptainer. 
diff --git a/pyproject.toml b/pyproject.toml index cfb87c7..573614c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ dependencies = [ "filelock", "fury", "indexed_gzip <= 1.8.7", - "Ingress2QSIRecon == 0.2.0", + "Ingress2QSIRecon == 0.2.1", "jinja2 < 3.1", "matplotlib", "networkx ~= 2.8.8", From 6e096ca5f8dd8dfd7b8b138e8a843d6a7cac77b0 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Mon, 23 Sep 2024 09:54:04 -0400 Subject: [PATCH 18/19] repin nilearn --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 573614c..583f717 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ "matplotlib", "networkx ~= 2.8.8", "nibabel <= 6.0.0", - "nilearn", + "nilearn == 0.10.1", "nipype == 1.8.6", "nireports ~= 24.0.2", "niworkflows >=1.9,<= 1.10", From 69f3fd7b4060b523fffef5e64cecf95a34bf23d7 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Mon, 23 Sep 2024 10:00:12 -0400 Subject: [PATCH 19/19] Update parser.py --- qsirecon/cli/parser.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/qsirecon/cli/parser.py b/qsirecon/cli/parser.py index f575666..f0143ab 100644 --- a/qsirecon/cli/parser.py +++ b/qsirecon/cli/parser.py @@ -476,7 +476,6 @@ def parse_args(args=None, namespace=None): # Run ingression if necessary if config.workflow.input_type in ("hcpya", "ukb"): - import os.path as op import shutil from ingress2qsirecon.data import load_resource @@ -487,8 +486,8 @@ def parse_args(args=None, namespace=None): config.execution.bids_dir = work_dir / "bids" # Make fake BIDS files - bids_scaffold = str(load_resource("bids_scaffold")) - if not op.exists(op.join(config.execution.bids_dir, "dataset_description.json")): + bids_scaffold = load_resource("bids_scaffold") + if not (config.execution.bids_dir / "dataset_description.json").exists(): shutil.copytree( bids_scaffold, config.execution.bids_dir,
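
Taken together, the later patches in this series hand ingression of non-BIDS inputs (UK Biobank and HCP Young Adult layouts) off to the external Ingress2QSIRecon package, which runs before the BIDSLayout is initialized and writes a scaffolded BIDS dataset into the working directory. A minimal standalone sketch of that pre-init step, as it stands after the final patch, follows; the concrete paths, input type, and participant label are hypothetical placeholders, while the ingress2qsirecon calls are the ones used in qsirecon/cli/parser.py above:

    import shutil
    from pathlib import Path

    from ingress2qsirecon.data import load_resource
    from ingress2qsirecon.utils.functions import create_layout
    from ingress2qsirecon.utils.workflows import create_ingress2qsirecon_wf

    # Hypothetical stand-ins for the config.execution/config.workflow values.
    input_dir = Path("/data/hcpya")            # non-BIDS input directory
    work_dir = Path("/scratch/qsirecon_work")  # QSIRecon working directory
    log_dir = Path("/out/qsirecon/logs")       # crashdump/log directory
    input_type = "hcpya"                       # or "ukb"
    participants = ["100307"]                  # hypothetical participant label(s)

    # The BIDS-compliant copy of the data is built inside the working directory.
    bids_dir = work_dir / "bids"

    # Seed the new BIDS tree with the packaged scaffold (dataset_description.json, etc.).
    bids_scaffold = load_resource("bids_scaffold")
    if not (bids_dir / "dataset_description.json").exists():
        shutil.copytree(bids_scaffold, bids_dir, dirs_exist_ok=True)

    # Build per-subject layouts from the non-BIDS input and run the conversion workflow.
    layouts = create_layout(input_dir, bids_dir, input_type, participants)
    wf = create_ingress2qsirecon_wf(layouts, base_dir=work_dir)
    wf.config["execution"]["crashdump_dir"] = str(log_dir)
    wf.run()

    # bids_dir then becomes config.execution.bids_dir, and the usual BIDSLayout
    # initialization (config.execution.init()) proceeds exactly as for QSIPrep input.

From the command line, this path is triggered by passing the non-BIDS directory as the ``input_dir`` positional argument together with ``--input-type hcpya`` or ``--input-type ukb`` (optionally restricting subjects with ``--participant-label``); the rest of the invocation is unchanged from a QSIPrep-derived input.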