
Commit

Initial changes to support -first_dir option in 5ttgen / labelsgmfirst
Lestropie committed Nov 24, 2024
1 parent e80c5a6 commit 67c79a8
Showing 9 changed files with 251 additions and 115 deletions.
116 changes: 69 additions & 47 deletions python/mrtrix3/commands/5ttgen/fsl.py
@@ -59,10 +59,12 @@ def usage(base_parser, subparsers): #pylint: disable=unused-variable
default=None,
help='Indicate that brain masking has already been applied to the input image')
options.add_argument('-first_dir',
type=app.Parser.DirectoryIn(),
metavar='/path/to/first/dir',
help='use pre-calculated output of FSL FIRST previously run on input T1-weighted image; '
'data must be defined in the same space as input T1w')
options.add_argument('-fast_dir',
type=app.Parser.DirectoryIn(),
metavar='/path/to/fast/dir',
help='use pre-calculated output of FSL FAST previously run on input T1-weighted image; '
'data must be defined in the same space as input T1w; '
@@ -81,11 +83,42 @@ def execute(): #pylint: disable=unused-variable
raise MRtrixError('Environment variable FSLDIR is not set; '
'please run appropriate FSL configuration script')

sgm_structures = [ 'L_Accu', 'R_Accu', 'L_Caud', 'R_Caud', 'L_Pall', 'R_Pall', 'L_Puta', 'R_Puta', 'L_Thal', 'R_Thal' ]
if app.ARGS.sgm_amyg_hipp:
sgm_structures.extend([ 'L_Amyg', 'R_Amyg', 'L_Hipp', 'R_Hipp' ])

bet_cmd = fsl.exe_name('bet')
fast_cmd = fsl.exe_name('fast')
first_cmd = fsl.exe_name('run_first_all')
ssroi_cmd = fsl.exe_name('standard_space_roi')

fast_cmd = None
fast_pve_images = []
if app.ARGS.fast_dir:
pve_candidates = sorted(app.ARGS.fast_dir.glob('*_pve_*.nii*'))
if len(pve_candidates) != 3:
raise MRtrixError(f'Expected three partial volume images in FAST directory {app.ARGS.fast_dir}; '
f'found {len(pve_candidates)}')
for index, filepath in enumerate(pve_candidates):
if f'_pve_{index}.nii' not in filepath.name:
raise MRtrixError('Expected FAST PVE image "*_pve_*.nii*"; '
f'found "{filepath.name}"')
fast_pve_images.append(filepath.name)
else:
try:
fast_cmd = fsl.exe_name('fast')
except MRtrixError as e:
raise MRtrixError('FSL program "fast" is requisite if -fast_dir option is not used') from e

first_cmd = None
first_object_files = []
if app.ARGS.first_dir:
first_prefix = fsl.check_first_input(app.ARGS.first_dir, sgm_structures)
first_object_files = [app.ARGS.first_dir / f'{first_prefix}{struct}_first.vtk' for struct in sgm_structures]
else:
try:
first_cmd = fsl.exe_name('run_first_all')
except MRtrixError as e:
raise MRtrixError('FSL program "run_first_all" is requisite if -first_dir option is not used') from e

first_atlas_path = os.path.join(fsl_path, 'data', 'first', 'models_336_bin')
if not os.path.isdir(first_atlas_path):
raise MRtrixError('Atlases required for FSL\'s FIRST program not installed; '
@@ -107,15 +140,20 @@ def execute(): #pylint: disable=unused-variable
raise MRtrixError('Provided T2w image does not match input T1w image')
run.command(['mrconvert', app.ARGS.t2, 'T2.nii', '-strides', '-1,+2,+3'],
preserve_pipes=True)

sgm_structures = [ 'L_Accu', 'R_Accu', 'L_Caud', 'R_Caud', 'L_Pall', 'R_Pall', 'L_Puta', 'R_Puta', 'L_Thal', 'R_Thal' ]
if app.ARGS.sgm_amyg_hipp:
sgm_structures.extend([ 'L_Amyg', 'R_Amyg', 'L_Hipp', 'R_Hipp' ])
if app.ARGS.fast_dir:
for name in fast_pve_images:
shutil.copyfile(app.ARGS.fast_dir / name, app.SCRATCH_DIR / name)
if app.ARGS.first_dir:
progress = app.ProgressBar('Importing pre-calculated FIRST segmentations', len(sgm_structures))
for filepath in first_object_files:
run.function(shutil.copyfile, filepath, app.SCRATCH_DIR / filepath.name)
progress.increment()
progress.done()

t1_spacing = image.Header('input.mif').spacing()
upsample_for_first = False
# If voxel size is 1.25mm or larger, make a guess that the user has erroneously re-gridded their data
if math.pow(t1_spacing[0] * t1_spacing[1] * t1_spacing[2], 1.0/3.0) > 1.225:
if not app.ARGS.first_dir and math.pow(t1_spacing[0] * t1_spacing[1] * t1_spacing[2], 1.0/3.0) > 1.225:
app.warn(f'Voxel size larger than expected for T1-weighted images ({t1_spacing}); '
f'note that ACT does not require re-gridding of T1 image to DWI space, and indeed '
f'retaining the original higher resolution of the T1 image is preferable')
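For reference, the heuristic above compares the geometric mean of the T1 voxel dimensions against a 1.225 mm threshold, i.e. it warns when $\sqrt[3]{d_x\,d_y\,d_z} > 1.225$; data re-gridded to 1.25 mm isotropic trips the warning, whereas a typical ~1 mm acquisition does not. With -first_dir supplied the check is now skipped, since FIRST will not be run on this image.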
@@ -200,53 +238,41 @@ def execute(): #pylint: disable=unused-variable

# FAST
if not app.ARGS.fast_dir:
assert not fast_pve_images
if fast_t2_input:
run.command(f'{fast_cmd} -S 2 {fast_t2_input} {fast_t1_input}')
else:
run.command(f'{fast_cmd} {fast_t1_input}')
if app.ARGS.fast_dir:
if not os.path.isdir(os.path.abspath(app.ARGS.fast_dir)):
raise MRtrixError('FAST directory cannot be found, please check path')
fast_output_prefix = fast_t1_input.split('.')[0]
fast_csf_input = fast_output_prefix + 'pve_0.nii.gz'
fast_gm_input = fast_output_prefix + '_pve_1.nii.gz'
fast_wm_input = fast_output_prefix + '_pve_2.nii.gz'
run.command('cp ' + path.from_user(fast_csf_input) + ' ' + path.to_scratch(fast_csf_input))
run.command('cp ' + path.from_user(fast_gm_input) + ' ' + path.to_scratch(fast_gm_input))
run.command('cp ' + path.from_user(fast_wm_input) + ' ' + path.to_scratch(fast_wm_input))
for index in range(0, 3):
fast_pve_images.append(fsl.find_image(f'{fast_t1_input.split(".")[0]}_pve_{index}'))

# FIRST
first_input = 'T1.nii'
if upsample_for_first:
app.warn('Generating 1mm isotropic T1 image for FIRST in hope of preventing failure, '
'since input image is of lower resolution')
run.command('mrgrid T1.nii regrid T1_1mm.nii -voxel 1.0 -interp sinc')
first_input = 'T1_1mm.nii'
first_brain_extracted_option = ['-b'] if app.ARGS.premasked else []
first_debug_option = [] if app.DO_CLEANUP else ['-d']
first_verbosity_option = ['-v'] if app.VERBOSITY == 3 else []
# TODO Preferably find a suitable reference image inside the FIRST directory
first_reference = 'T1.nii'
if not app.ARGS.first_dir:
run.command([first_cmd, '-m', 'none', '-s', ','.join(sgm_structures), '-i', first_input, '-o', 'first']
assert not first_object_files
if upsample_for_first:
app.warn('Generating 1mm isotropic T1 image for FIRST in hope of preventing failure, '
'since input image is of lower resolution')
run.command('mrgrid T1.nii regrid T1_1mm.nii -voxel 1.0 -interp sinc')
first_reference = 'T1_1mm.nii'
first_brain_extracted_option = ['-b'] if app.ARGS.premasked else []
first_debug_option = [] if app.DO_CLEANUP else ['-d']
first_verbosity_option = ['-v'] if app.VERBOSITY == 3 else []
run.command([first_cmd, '-m', 'none', '-s', ','.join(sgm_structures), '-i', first_reference, '-o', 'first']
+ first_brain_extracted_option
+ first_debug_option
+ first_verbosity_option)
elif app.ARGS.first_dir:
if not os.path.isdir(os.path.abspath(app.ARGS.first_dir)):
raise MRtrixError('FIRST directory cannot be found, please check path')
for struct in sgm_structures:
vtk_in_path = 'first-' + struct + '_first.vtk'
run.command('cp ' + app.ARGS.first_dir + '/' + vtk_in_path + ' .')
run.command('cp -r ' + app.ARGS.first_dir + '/first.logs' + ' .')
fsl.check_first('first', sgm_structures)
fsl.check_first_output('first', sgm_structures)
first_object_files = [f'first-{struct}_first.vtk' for struct in sgm_structures]

# Convert FIRST meshes to partial volume images
pve_image_list = [ ]
progress = app.ProgressBar('Generating partial volume images for SGM structures', len(sgm_structures))
for struct in sgm_structures:
for struct, vtk_in_path in zip(sgm_structures, first_object_files):
pve_image_path = f'mesh2voxel_{struct}.mif'
vtk_in_path = f'first-{struct}_first.vtk'
vtk_temp_path = f'{struct}.vtk'
run.command(['meshconvert', vtk_in_path, vtk_temp_path, '-transform', 'first2real', first_input])
run.command(['meshconvert', vtk_in_path, vtk_temp_path, '-transform', 'first2real', first_reference])
run.command(['mesh2voxel', vtk_temp_path, fast_t1_input, pve_image_path])
pve_image_list.append(pve_image_path)
progress.increment()
@@ -255,24 +281,20 @@ def execute(): #pylint: disable=unused-variable
'mrcalc', '-', '1.0', '-min', 'all_sgms.mif'])

# Combine the tissue images into the 5TT format within the script itself
fast_output_prefix = fast_t1_input.split('.')[0]
fast_csf_output = fsl.find_image(f'{fast_output_prefix}_pve_0')
fast_gm_output = fsl.find_image(f'{fast_output_prefix}_pve_1')
fast_wm_output = fsl.find_image(f'{fast_output_prefix}_pve_2')
# Step 1: Run LCC on the WM image
run.command(f'mrthreshold {fast_wm_output} - -abs 0.001 | '
run.command(f'mrthreshold {fast_pve_images[2]} - -abs 0.001 | '
f'maskfilter - connect - -connectivity | '
f'mrcalc 1 - 1 -gt -sub remove_unconnected_wm_mask.mif -datatype bit')
# Step 2: Generate the images in the same fashion as the old 5ttgen binary used to:
# - Preserve CSF as-is
# - Preserve SGM, unless it results in a sum of volume fractions greater than 1, in which case clamp
# - Multiply the FAST volume fractions of GM and CSF, so that the sum of CSF, SGM, CGM and WM is 1.0
run.command(f'mrcalc {fast_csf_output} remove_unconnected_wm_mask.mif -mult csf.mif')
run.command(f'mrcalc {fast_pve_images[0]} remove_unconnected_wm_mask.mif -mult csf.mif')
run.command('mrcalc 1.0 csf.mif -sub all_sgms.mif -min sgm.mif')
run.command(f'mrcalc 1.0 csf.mif sgm.mif -add -sub {fast_gm_output} {fast_wm_output} -add -div multiplier.mif')
run.command(f'mrcalc 1.0 csf.mif sgm.mif -add -sub {fast_pve_images[1]} {fast_pve_images[2]} -add -div multiplier.mif')
run.command('mrcalc multiplier.mif -finite multiplier.mif 0.0 -if multiplier_noNAN.mif')
run.command(f'mrcalc {fast_gm_output} multiplier_noNAN.mif -mult remove_unconnected_wm_mask.mif -mult cgm.mif')
run.command(f'mrcalc {fast_wm_output} multiplier_noNAN.mif -mult remove_unconnected_wm_mask.mif -mult wm.mif')
run.command(f'mrcalc {fast_pve_images[1]} multiplier_noNAN.mif -mult remove_unconnected_wm_mask.mif -mult cgm.mif')
run.command(f'mrcalc {fast_pve_images[2]} multiplier_noNAN.mif -mult remove_unconnected_wm_mask.mif -mult wm.mif')
run.command('mrcalc 0 wm.mif -min path.mif')
run.command('mrcat cgm.mif sgm.mif wm.mif csf.mif path.mif - -axis 3 | '
'mrconvert - combined_precrop.mif -strides +2,+3,+4,+1')
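For reference, a sketch of the arithmetic implemented by the mrcalc calls above, writing $p_{\mathrm{CSF}}$, $p_{\mathrm{GM}}$, $p_{\mathrm{WM}}$ for the three FAST partial volume fractions (fast_pve_images[0..2]), $s$ for the summed sub-cortical GM image all_sgms.mif, and $m$ for remove_unconnected_wm_mask.mif:

$$\mathrm{CSF} = p_{\mathrm{CSF}}\,m, \qquad \mathrm{SGM} = \min\left(1 - \mathrm{CSF},\; s\right), \qquad k = \frac{1 - \mathrm{CSF} - \mathrm{SGM}}{p_{\mathrm{GM}} + p_{\mathrm{WM}}}$$

$$\mathrm{cGM} = k\,p_{\mathrm{GM}}\,m, \qquad \mathrm{WM} = k\,p_{\mathrm{WM}}\,m, \qquad \mathrm{path} = 0$$

so that $\mathrm{CSF} + \mathrm{SGM} + \mathrm{cGM} + \mathrm{WM} = 1$ within the connected-WM mask, with $k$ replaced by zero wherever it is non-finite (the multiplier_noNAN.mif step).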
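The code above also calls two helpers, fsl.check_first_input() and fsl.check_first_output(), whose definitions live elsewhere in this changeset (in the mrtrix3.fsl module) and are not shown on this page. Purely as a hedged sketch of what check_first_input() presumably does — infer the FIRST output prefix within a user-supplied directory and verify that a VTK mesh exists for every requested structure — it might look something like the following; the name and behaviour are inferred from the call sites, not taken from the committed implementation:

from pathlib import Path
from mrtrix3 import MRtrixError

def check_first_input(first_dir, structures):
  # Hypothetical sketch only:
  # find the common prefix such that '<prefix><structure>_first.vtk'
  # exists in first_dir for every requested structure
  structures = list(structures)
  first_dir = Path(first_dir)
  candidates = sorted(first_dir.glob(f'*{structures[0]}_first.vtk'))
  if not candidates:
    raise MRtrixError(f'No FIRST mesh found for structure {structures[0]} in directory {first_dir}')
  prefix = candidates[0].name[:-len(f'{structures[0]}_first.vtk')]
  missing = [struct for struct in structures
             if not (first_dir / f'{prefix}{struct}_first.vtk').exists()]
  if missing:
    raise MRtrixError(f'FIRST directory {first_dir} is missing meshes for: {", ".join(missing)}')
  return prefix

check_first_output() would then presumably perform the analogous verification on the files produced by an internal run_first_all invocation, as the previous fsl.check_first() call did.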
90 changes: 50 additions & 40 deletions python/mrtrix3/commands/5ttgen/hsvs.py
@@ -55,6 +55,7 @@ def usage(base_parser, subparsers): #pylint: disable=unused-variable
default=None,
help='Classify the brainstem as white matter')
parser.add_argument('-first_dir',
type=app.Parser.DirectoryIn(),
metavar='/path/to/first/dir',
help='utilise pre-calculated output of FSL FIRST run on input T1-weighted image; '
'must have been computed in the same space as FreeSurfer T1w')
@@ -182,7 +183,9 @@ def execute(): #pylint: disable=unused-variable

# Most FreeSurfer files will be accessed in-place; no need to pre-convert them into the temporary directory
# However convert aparc image so that it does not have to be repeatedly uncompressed
run.command(['mrconvert', os.path.join(app.ARGS.input, 'mri', 'aparc+aseg.mgz'), 'aparc.mif'],
# TODO See if this can be deferred until later
aparc_image = 'aparc.mif'
run.command(['mrconvert', os.path.join(app.ARGS.input, 'mri', 'aparc+aseg.mgz'), aparc_image],
preserve_pipes=True)
if app.ARGS.template:
run.command(['mrconvert', app.ARGS.template, 'template.mif', '-axes', '0,1,2'],
@@ -192,10 +195,8 @@ def execute(): #pylint: disable=unused-variable
mri_dir = os.path.join(app.ARGS.input, 'mri')
check_dir(surf_dir)
check_dir(mri_dir)
aparc_image = 'aparc.mif'
mask_image = os.path.join(mri_dir, 'brainmask.mgz')
reg_file = os.path.join(mri_dir, 'transforms', 'talairach.xfm')
check_file(aparc_image)
check_file(mask_image)
check_file(reg_file)
template_image = 'template.mif' if app.ARGS.template else aparc_image
@@ -220,17 +221,22 @@ def execute(): #pylint: disable=unused-variable
else:
fast_suffix = '.nii.gz'
else:
app.warn('Could not find FSL program fast; script will not use fast for cerebellar tissue segmentation')
# Verify FIRST availability
app.warn('Could not find FSL program fast; '
'script will not use fast for cerebellar tissue segmentation')
else:
app.warn('Environment variable FSLDIR is not set; '
'script will run without FSL components')
# Verify that FIRST data will be available,
# whether from an input directory or by executing internally
if app.ARGS.first_dir:
have_first = True
else:
try:
first_cmd = fsl.exe_name('run_first_all')
except MRtrixError:
first_cmd = None
first_atlas_path = os.path.join(fsl_path, 'data', 'first', 'models_336_bin')
have_first = first_cmd and os.path.isdir(first_atlas_path)
else:
app.warn('Environment variable FSLDIR is not set; '
'script will run without FSL components')
have_first = fsl_path and first_cmd and os.path.isdir(first_atlas_path)

acpc_string = f'anterior {"& posterior commissures" if ATTEMPT_PC else "commissure"}'
have_acpcdetect = bool(shutil.which('acpcdetect')) and 'ARTHOME' in os.environ
@@ -302,8 +308,8 @@ def execute(): #pylint: disable=unused-variable
f'(candidate images: {hipp_subfield_all_images})')
elif hippocampi_method == 'first':
if not have_first:
raise MRtrixError('Cannot use "first" method for hippocampi segmentation; '
'check FSL installation')
raise MRtrixError('Cannot use "first" method for hippocampi segmentation: '
'FSL installation not found, and -first_dir not specified')
else:
if have_hipp_subfields:
hippocampi_method = 'subfields'
@@ -313,9 +319,8 @@ def execute(): #pylint: disable=unused-variable
' segmentation')
elif have_first:
hippocampi_method = 'first'
app.console('No hippocampal subfields module output detected, '
'but FSL FIRST is installed; '
'will utilise latter for hippocampi segmentation')
app.console('No hippocampal subfields module output detected; '
'FSL FIRST will be utilised for hippocampi segmentation')
else:
hippocampi_method = 'aseg'
app.console('Neither hippocampal subfields module output nor FSL FIRST detected; '
@@ -337,7 +342,6 @@ def execute(): #pylint: disable=unused-variable
app.warn('Option -sgm_amyg_hipp ignored '
'(hsvs algorithm always assigns hippocampi & amygdalae as sub-cortical grey matter)')


# Similar logic for thalami
thalami_method = app.ARGS.thalami
if thalami_method:
@@ -346,8 +350,8 @@ def execute(): #pylint: disable=unused-variable
raise MRtrixError('Could not find thalamic nuclei module output')
elif thalami_method == 'first':
if not have_first:
raise MRtrixError('Cannot use "first" method for thalami segmentation; '
'check FSL installation')
raise MRtrixError('Cannot use "first" method for thalami segmentation: '
'no FSL installation found, and -first_dir not specified')
else:
# Not happy with outputs of thalamic nuclei submodule; default to FIRST
if have_first:
@@ -365,6 +369,24 @@ def execute(): #pylint: disable=unused-variable
app.console('Neither thalamic nuclei module output nor FSL FIRST detected; '
'FreeSurfer aseg will be used for thalami segmentation')

# Generate the list of structures to be segmented by FIRST
from_first = []
if have_first:
from_first = SGM_FIRST_MAP.copy()
if hippocampi_method == 'subfields':
from_first = { key: value for key, value in from_first.items() if 'Hippocampus' not in value }
if hipp_subfield_has_amyg:
from_first = { key: value for key, value in from_first.items() if 'Amygdala' not in value }
elif hippocampi_method == 'aseg':
from_first = { key: value for key, value in from_first.items() if 'Hippocampus' not in value and 'Amygdala' not in value }
if thalami_method != 'first':
from_first = { key: value for key, value in from_first.items() if 'Thalamus' not in value }
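As a hedged illustration of the filtering above (the real SGM_FIRST_MAP is defined near the top of hsvs.py and is not shown in this diff; the entries below are assumed purely to show the effect):

# Assumed excerpt of SGM_FIRST_MAP, for illustration only
SGM_FIRST_MAP = {'L_Hipp': 'Left-Hippocampus', 'L_Amyg': 'Left-Amygdala',
                 'L_Thal': 'Left-Thalamus-Proper', 'L_Puta': 'Left-Putamen'}
# With hippocampi_method == 'aseg' and thalami_method == 'first',
# hippocampi and amygdalae are dropped while thalami and the remaining
# sub-cortical structures are retained:
# from_first == {'L_Thal': 'Left-Thalamus-Proper', 'L_Puta': 'Left-Putamen'}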

# If -first_dir has been specified, need to make sure that we can find all requisite files
first_input_prefix = None
if app.ARGS.first_dir:
first_input_prefix = fsl.check_first_input(app.ARGS.first_dir, from_first.keys())


###########################
# Commencing segmentation #
@@ -549,30 +571,18 @@ def execute(): #pylint: disable=unused-variable
progress.done()

if have_first:
app.console('Running FSL FIRST to segment sub-cortical grey matter structures')
from_first = SGM_FIRST_MAP.copy()
if hippocampi_method == 'subfields':
from_first = { key: value for key, value in from_first.items() if 'Hippocampus' not in value }
if hipp_subfield_has_amyg:
from_first = { key: value for key, value in from_first.items() if 'Amygdala' not in value }
elif hippocampi_method == 'aseg':
from_first = { key: value for key, value in from_first.items() if 'Hippocampus' not in value and 'Amygdala' not in value }
if thalami_method != 'first':
from_first = { key: value for key, value in from_first.items() if 'Thalamus' not in value }
if not app.ARGS.first_dir:
run.command([first_cmd, '-s', ','.join(from_first.keys()), '-i', 'T1.nii', '-b', '-o', 'first'])
elif app.ARGS.first_dir:
if not os.path.isdir(os.path.abspath(app.ARGS.first_dir)):
app.error('FIRST directory cannot be found, please check path')
for key, value in from_first.items():
vtk_in_path = 'first-' + key + '_first.vtk'
run.command('cp ' + app.ARGS.first_dir + '/' + vtk_in_path + ' .')
run.command('cp -r ' + app.ARGS.first_dir + '/first.logs' + ' .')
fsl.check_first('first', from_first.keys())
app.cleanup(glob.glob('T1_to_std_sub.*'))
first_vtk_files = []
if app.ARGS.first_dir:
assert first_input_prefix
first_vtk_files = [app.ARGS.first_dir / f'{first_input_prefix}{key}_first.vtk' for key in from_first.keys()]
else:
app.console('Running FSL FIRST to segment sub-cortical grey matter structures')
run.command([first_cmd, '-s', ','.join(from_first.keys()), '-i', 'T1.nii', '-b', '-o', 'first'])
fsl.check_first_output('first', from_first.keys())
first_vtk_files = [f'first-{key}_first.vtk' for key in from_first.keys()]
app.cleanup(glob.glob('T1_to_std_sub.*'))
progress = app.ProgressBar('Mapping FIRST segmentations to image', 2*len(from_first))
for key, value in from_first.items():
vtk_in_path = f'first-{key}_first.vtk'
for (key, value), vtk_in_path in zip(from_first.items(), first_vtk_files):
vtk_converted_path = f'first-{key}_transformed.vtk'
run.command(['meshconvert', vtk_in_path, vtk_converted_path, '-transform', 'first2real', 'T1.nii'])
app.cleanup(vtk_in_path)