From 426f82509903730d2b3af9d339d10d489c10e7ad Mon Sep 17 00:00:00 2001 From: Adam Kimbler Date: Thu, 10 Oct 2019 14:41:21 -0400 Subject: [PATCH] Allow dicom files or directories and fix field map json completion (#7) Summary --------- ENH: Better completion of fieldmap jsons ENH: Input data can be tarball/tar.gz or directory, bidsifier will detect the type and apply accordingly * Major Refactoring * More changes * Fix * Fix * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * moved files * Update bidsconvert.sh * Update bidsify.py * Update bidsconvert.sh * Fixed some behavior * Update clean_metadata.py * Preparing for docker build * Fixed Line Endings * Update Dockerfile * Update bidsconvert.sh * first pass * Refactoring * Debug * Update complete_jsons.py * Update complete_jsons.py * Update complete_jsons.py * Update complete_jsons.py * Debug * Debug * Update complete_jsons.py * Added helper function for cleaning up directory ENH: Added Helper function maintain_bids to clean up working directories post run MAINT: Removed troubleshooting prints * Update bidsconvert.sh * Update Dockerfile * Update Dockerfile * Update Dockerfile * Update Dockerfile * Cleaning * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update bidsify.py * Update complete_jsons.py * Update complete_jsons.py * Update complete_jsons.py * debugging * Update complete_jsons.py * FINALLY * Update complete_jsons.py * Update complete_jsons.py * Update complete_jsons.py * Requested Changes Modified clean_metadata to not remove fields, only unwrap global keys * Update bidsify.py * Update bidsify.py Co-Authored-By: Taylor Salo * Update clean_metadata.py Co-Authored-By: Taylor Salo * Update clean_metadata.py * Update clean_metadata.py * Update clean_metadata.py Co-Authored-By: Taylor Salo * Update 
clean_metadata.py * Update Dockerfile --- Dockerfile | 14 ++- bidsconvert.sh | 46 ++++---- bidsify.py | 155 ++++++++++++++++++++------- build_image.sh | 14 --- clean_metadata.py | 47 +++++--- complete_jsons.py | 268 ++++++++++++---------------------------------- 6 files changed, 248 insertions(+), 296 deletions(-) mode change 100755 => 100644 bidsconvert.sh delete mode 100755 build_image.sh mode change 100755 => 100644 clean_metadata.py mode change 100755 => 100644 complete_jsons.py diff --git a/Dockerfile b/Dockerfile index e91ef26..d2dc8c2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -45,7 +45,7 @@ RUN apt-get update -qq \ # nvm environment variables ENV NVM_DIR /usr/local/nvm -ENV NODE_VERSION 8.0.0 +ENV NODE_VERSION 10.16.3 # install nvm # https://github.com/creationix/nvm#install-script @@ -62,7 +62,7 @@ ENV NODE_PATH $NVM_DIR/v$NODE_VERSION/lib/node_modules ENV PATH $NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH #------------------------ -# Install dcm2niix v1.0.20171215 +# Install dcm2niix v1.0.20190410 #------------------------ WORKDIR /tmp RUN deps='cmake g++ gcc git make pigz zlib1g-dev' \ @@ -70,7 +70,7 @@ RUN deps='cmake g++ gcc git make pigz zlib1g-dev' \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ && mkdir dcm2niix \ - && curl -sSL https://github.com/rordenlab/dcm2niix/tarball/v1.0.20171215 | tar xz -C dcm2niix --strip-components 1 \ + && curl -sSL https://github.com/rordenlab/dcm2niix/tarball/v1.0.20190410 | tar xz -C dcm2niix --strip-components 1 \ && mkdir dcm2niix/build && cd dcm2niix/build \ && cmake .. 
&& make \ && make install \ @@ -99,14 +99,15 @@ RUN conda create -y -q --name neuro python=3 \ && sync && conda clean -tipsy && sync \ && /bin/bash -c "source activate neuro \ && pip install git+git://github.com/FIU-Neuro/dcmstack \ - && pip install numpy pandas pybids nibabel heudiconv pydicom python-dateutil" \ + && pip install numpy pandas pybids nibabel pydicom python-dateutil \ + && pip install git+git://github.com/nipy/heudiconv@202f9434819318055e5293486f6bdac489989c52" \ && sync \ && sed -i '$isource activate neuro' $ND_ENTRYPOINT #--------------- # BIDS-validator #--------------- -RUN npm install -g bids-validator@0.27.5 +RUN npm install -g bids-validator@1.3.0 #-------------------------------------------------- # Add NeuroDebian repository @@ -166,4 +167,7 @@ ENV SINGULARITY_TMPDIR /scratch # Set entrypoint script #---------------------- COPY ./ /scripts/ +USER root +RUN chmod 755 -R /scripts/ +USER neuro ENTRYPOINT ["/neurodocker/startup.sh", "/scripts/bidsify.py"] diff --git a/bidsconvert.sh b/bidsconvert.sh old mode 100755 new mode 100644 index fb4b2da..ec28572 --- a/bidsconvert.sh +++ b/bidsconvert.sh @@ -9,12 +9,12 @@ ########################## # Get command line options ########################## -scratchdir=$1 -heuristics=$2 -project=$3 -sub=$4 -sess=${5:-None} - +dir_type=$1 +dicom_dir=$2 +heuristics=$3 +out_dir=$4 +sub=$5 +sess=${6:-None} ###################################### ###################################### @@ -23,40 +23,40 @@ sess=${5:-None} ############################################# if [ "$sess" = "None" ]; then # Put data in BIDS format - heudiconv -d $scratchdir/sub-{subject}.tar -s $sub -f \ - $heuristics -c dcm2niix -o $scratchdir/bids/ --bids --overwrite + heudiconv $dir_type $dicom_dir -s $sub -f \ + $heuristics -c dcm2niix -o $out_dir --bids --overwrite --minmeta minipath=sub-$sub else # Put data in BIDS format - heudiconv -d $scratchdir/sub-{subject}-ses-{session}.tar -s $sub -ss $sess -f \ - $heuristics -c dcm2niix -o 
$scratchdir/bids/ --bids --overwrite + heudiconv $dir_type $dicom_dir -s $sub -ss $sess -f \ + $heuristics -c dcm2niix -o $out_dir --bids --overwrite --minmeta minipath=sub-$sub/ses-$sess fi ############################################## # Check results, anonymize, and clean metadata ############################################## -if [ -d $scratchdir/bids/$minipath ]; then - chmod -R 774 $scratchdir/bids/$minipath +if [ -d $out_dir/$minipath ]; then + chmod -R 774 $out_dir/$minipath # Deface structural scans - imglist=$(ls $scratchdir/bids/$minipath/anat/*.nii.gz) - for tmpimg in $imglist; do - mri_deface $tmpimg /src/deface/talairach_mixed_with_skull.gca \ + if [ -d $out_dir/$minipath/anat/ ]; then + imglist=$(ls $out_dir/$minipath/anat/*.nii.gz) + for tmpimg in $imglist; do + mri_deface $tmpimg /src/deface/talairach_mixed_with_skull.gca \ /src/deface/face.gca $tmpimg - done - rm ./*.log + done + fi + #rm ./*.log # Add IntendedFor and TotalReadoutTime fields to jsons - python /scripts/complete_jsons.py -d $scratchdir/bids/ -s $sub -ss $sess --overwrite - + python /scripts/complete_jsons.py -d $out_dir -s $sub -ss $sess --overwrite # Remove extraneous fields from jsons - python /scripts/clean_metadata.py $scratchdir/bids/ - + python /scripts/clean_metadata.py $out_dir $sub $sess # Validate dataset and, if it passes, copy files to outdir - bids-validator $scratchdir/bids/ --ignoreWarnings > $scratchdir/validator.txt + bids-validator $out_dir --ignoreWarnings > $out_dir/validator.txt else - echo "FAILED" > $scratchdir/validator.txt + echo "FAILED" > $out_dir/validator.txt echo "Heudiconv failed to convert this dataset to BIDS format." 
fi ###################################### diff --git a/bidsify.py b/bidsify.py index d9442a4..2e0ca1c 100755 --- a/bidsify.py +++ b/bidsify.py @@ -1,20 +1,79 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python """ From https://github.com/BIDS-Apps/example/blob/aa0d4808974d79c9fbe54d56d3b47bb2cf4e0a0d/run.py """ import os -import os.path as op import tarfile +import pathlib import argparse import subprocess - +import shutil import pydicom import numpy as np import pandas as pd from dateutil.parser import parse +def manage_dicom_dir(dicom_dir): + ''' + Helper function to grab data from dicom header depending on the type of dicom + directory given + + Parameters + ---------- + dicom_dir: Directory containing dicoms for processing + ''' + if dicom_dir.suffix in ('.gz', '.tar'): + open_type = 'r' + if dicom_dir.suffix == '.gz': + open_type = 'r:gz' + with tarfile.open(dicom_dir, open_type) as tar: + dicoms = [mem for mem in tar.getmembers() if + mem.name.endswith('.dcm')] + f_obj = tar.extractfile(dicoms[0]) + data = pydicom.read_file(f_obj) + elif dicom_dir.is_dir(): + f_obj = [x for x in pathlib.Path(dicom_dir).glob('**/*.dcm')][0].as_posix() + data = pydicom.read_file(f_obj) + return data + + +def maintain_bids(output_dir, sub, ses): + ''' + Function that cleans up working directories when called, + if all work is complete, will return directory to bids standard + (removing .heudiconv and tmp directories) + + Parameters + ---------- + output_dir: Path object of bids directory + sub: Subject ID + ses: Session ID, if required + ''' + for root in ['.heudiconv', 'tmp']: + if ses: + if root == '.heudiconv': + shutil.rmtree(output_dir / root / sub / f'ses-{ses}') + else: + shutil.rmtree(output_dir / root / sub / ses) + if (output_dir / root / sub).is_dir(): + if not [x for x in (output_dir / root / sub).iterdir()]: + shutil.rmtree((output_dir / root / sub)) + if (output_dir / root).is_dir(): + if not [x for x in (output_dir / root).iterdir()]: + shutil.rmtree((output_dir / 
root)) + + def run(command, env={}): + ''' + Helper function that runs a given command and allows for specification of + environment information + + Parameters + ---------- + command: command to be sent to system + env: parameters to be added to environment + ''' merged_env = os.environ merged_env.update(env) process = subprocess.Popen(command, stdout=subprocess.PIPE, @@ -34,59 +93,83 @@ def run(command, env={}): def get_parser(): + ''' + Sets up argument parser for scripts + + Parameters + ---------- + ''' parser = argparse.ArgumentParser(description='BIDS conversion and ' 'anonymization for the FIU ' 'scanner.') parser.add_argument('-d', '--dicomdir', required=True, dest='dicom_dir', help='Directory containing raw data.') - parser.add_argument('--heuristics', required=True, dest='heuristics', + parser.add_argument('-f', '--heuristics', required=True, dest='heuristics', help='Path to the heuristics file.') - parser.add_argument('--project', required=True, dest='project', - help='Name of the project.') - parser.add_argument('--sub', required=True, dest='sub', + parser.add_argument('-s', '--sub', required=True, dest='sub', help='The label of the subject to analyze.') - parser.add_argument('--ses', required=False, dest='ses', + parser.add_argument('-ss', '--ses', required=False, dest='ses', help='Session number', default=None) + parser.add_argument('-o', '--output_dir', dest='output_dir', required=True) return parser def main(argv=None): + ''' + Function that executes when bidsify.py is called + + Parameters inherited from argparser + ---------- + dicom_dir: Directory cointaining dicom data to be processed + heuristics: Path to heuristics file + sub: Subject ID + ses: Session ID, if required + output_dir: Directory to output bidsified data + ''' args = get_parser().parse_args(argv) - # Check inputs - if args.ses is None: - tar_file = op.join(args.dicom_dir, 'sub-{0}.tar'.format(args.sub)) + args.dicom_dir = pathlib.Path(args.dicom_dir) + args.heuristics = 
pathlib.Path(args.heuristics) + args.output_dir = pathlib.Path(args.output_dir) + if args.dicom_dir.is_file(): + dir_type = '-d' + heudiconv_input = args.dicom_dir.as_posix().replace(args.sub, '{subject}') + if args.ses: + heudiconv_input = heudiconv_input.replace(args.ses, '{session}') else: - tar_file = op.join(args.dicom_dir, - 'sub-{0}-ses-{1}.tar'.format(args.sub, args.ses)) - - if not args.dicom_dir.startswith('/scratch'): - raise ValueError('Dicom files must be in scratch.') - - if not op.isfile(tar_file): - raise ValueError('Argument "dicom_dir" must contain a file ' - 'named {0}.'.format(op.basename(tar_file))) - - if not op.isfile(args.heuristics): + dir_type = '--files' + heudiconv_input = args.dicom_dir.as_posix() + #if not args.dicom_dir.startswith('/scratch'): + # raise ValueError('Dicom files must be in scratch.') + if not args.heuristics.is_file(): raise ValueError('Argument "heuristics" must be an existing file.') # Compile and run command - cmd = ('/scripts/bidsconvert.sh {0} {1} {2} {3} {4}'.format(args.dicom_dir, - args.heuristics, - args.project, - args.sub, args.ses)) - run(cmd, env={'TMPDIR': args.dicom_dir}) + cmd = ('/scripts/bidsconvert.sh {0} {1} {2} {3} {4} {5}'.format(dir_type, + heudiconv_input, + args.heuristics, + args.output_dir, + args.sub, + args.ses)) + args.output_dir.mkdir(parents=True, exist_ok=True) + tmp_path = args.output_dir / 'tmp' / args.sub + if not (args.output_dir / '.bidsignore').is_file(): + with (args.output_dir / '.bidsignore').open('a') as wk_file: + wk_file.write('.heudiconv/\n') + wk_file.write('tmp/\n') + wk_file.write('validator.txt\n') + if args.ses: + tmp_path = tmp_path / args.ses + tmp_path.mkdir(parents=True, exist_ok=True) + run(cmd, env={'TMPDIR': tmp_path.name}) + #Cleans up output directory, returning it to bids standard + maintain_bids(args.output_dir, args.sub, args.ses) # Grab some info to add to the participants file - participants_file = op.join(args.dicom_dir, 'bids/participants.tsv') - if 
op.isfile(participants_file): + participants_file = args.output_dir / 'participants.tsv' + if participants_file.is_file(): df = pd.read_csv(participants_file, sep='\t') - with tarfile.open(tar_file, 'r') as tar: - dicoms = [mem for mem in tar.getmembers() if - mem.name.endswith('.dcm')] - f_obj = tar.extractfile(dicoms[0]) - data = pydicom.read_file(f_obj) - + data = manage_dicom_dir(args.dicom_dir) if data.get('PatientAge'): age = data.PatientAge.replace('Y', '') try: @@ -100,7 +183,7 @@ def main(argv=None): age = np.nan df2 = pd.DataFrame(columns=['age', 'sex', 'weight'], data=[[age, data.PatientSex, data.PatientWeight]]) - df = pd.concat((df, df2), axis=1) + df = pd.concat([df, df2], axis=1) df.to_csv(participants_file, sep='\t', index=False) diff --git a/build_image.sh b/build_image.sh deleted file mode 100755 index d81ea50..0000000 --- a/build_image.sh +++ /dev/null @@ -1,14 +0,0 @@ -# chmod stuff -chmod 775 *.py -chmod 775 *.sh - -docker build -t cis/bidsify:v0.0.2 . - -# This converts the Docker image cis/bidsify to a singularity image, -# to be located in /Users/tsalo/Documents/singularity_images/ -docker run --privileged -t --rm \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v /Users/tsalo/Documents/singularity_images:/output \ - singularityware/docker2singularity \ - -m "/scratch" \ - cis/bidsify:v0.0.2 diff --git a/clean_metadata.py b/clean_metadata.py old mode 100755 new mode 100644 index a55ed47..67b1af1 --- a/clean_metadata.py +++ b/clean_metadata.py @@ -1,24 +1,34 @@ +#!/usr/bin/env python """ Author: Taylor Salo, tsalo006@fiu.edu Edited: Michael Riedel, miriedel@fiu.edu; 4/18/2018 """ from __future__ import print_function - import sys import json +from bids import BIDSLayout -from bids.grabbids import BIDSLayout +def main(bids_dir, sub, sess): + ''' + Removes unnecessary metadata from scan sidecar jsons -def main(bids_dir): + Parameters + ---------- + bids_dir: path to BIDS dataset + ''' layout = BIDSLayout(bids_dir) - scans = 
layout.get(extensions='nii.gz') + scans = layout.get(extension='nii.gz', subject=sub, session=sess) + """ KEEP_KEYS = [ - 'AnatomicalLandmarkCoordinates', 'AcquisitionDuration', 'CogAtlasID', + 'AnatomicalLandmarkCoordinates', 'AcquisitionTime', + 'AcquisitionDuration', 'CogAtlasID', 'CogPOID', 'CoilCombinationMethod', 'ConversionSoftware', 'ConversionSoftwareVersion', 'DelayAfterTrigger', 'DelayTime', - 'DeviceSerialNumber', 'DwellTime', 'EchoNumbers', 'EchoTime', + 'DeviceSerialNumber', 'DwellTime', 'EchoNumber', 'EchoNumbers', + 'EchoTime', 'EchoTime1', 'EchoTime2', + 'EchoTime1', 'EchoTime2', 'EchoTrainLength', 'EffectiveEchoSpacing', 'FlipAngle', 'GradientSetType', 'HighBit', 'ImagedNucleus', 'ImageType', 'ImagingFrequency', @@ -42,16 +52,19 @@ def main(bids_dir): 'SliceThickness', 'SliceTiming', 'SoftwareVersions', 'SpacingBetweenSlices', 'StationName', 'TaskDescription', 'TaskName', 'TotalReadoutTime', 'Units', 'VolumeTiming'] - + """ for scan in scans: - json_file = scan.filename.replace('.nii.gz', '.json') - metadata = layout.get_metadata(scan.filename) - metadata2 = {key: metadata[key] for key in KEEP_KEYS if key in - metadata.keys()} - for key in KEEP_KEYS: - if key not in metadata.keys() and 'global' in metadata.keys(): - if key in metadata['global']['const'].keys(): - metadata2[key] = metadata['global']['const'][key] + json_file = scan.path.replace('.nii.gz', '.json') + metadata = layout.get_metadata(scan.path) + metadata2 = {key: metadata[key] for key in metadata.keys() if key != 'global'} + global_keys = {} + if 'global' in metadata.keys(): + if 'const' in metadata['global']: + global_keys = metadata['global']['const'] + + for key in global_keys: + if key not in metadata: + metadata2[key] = global_keys[key] with open(json_file, 'w') as fo: json.dump(metadata2, fo, sort_keys=True, indent=4) @@ -59,4 +72,6 @@ def main(bids_dir): if __name__ == '__main__': folder = sys.argv[1] - main(folder) + sub = sys.argv[2] + sess = sys.argv[3] + main(folder, sub, 
sess) diff --git a/complete_jsons.py b/complete_jsons.py old mode 100755 new mode 100644 index 14fb800..4ef9d79 --- a/complete_jsons.py +++ b/complete_jsons.py @@ -8,30 +8,54 @@ - Add TaskName to functional scan jsons. """ import json -import bisect import argparse import os.path as op - import nibabel as nib -from bids.grabbids import BIDSLayout +from bids import BIDSLayout -def _files_to_dict(file_list): - """Convert list of BIDS Files to dictionary where key is - acquisition time (datetime.datetime object) and value is - the File object. +def intended_for_gen(niftis, fmap_nifti): + """ + Generates 'IntendedFor' field for a given fieldmap nifti based on a list of + given niftis + - niftis : list of niftis for which a given fieldmap could be intended + - fmap_nifti: the fieldmap nifti for which IntendedFor field needs to be created """ + intended_for = [] + fmap_entities = fmap_nifti.get_entities() + acq_time = fmap_nifti.get_metadata()['AcquisitionTime'] out_dict = {} - for file_ in file_list: - fname = file_.filename - with open(fname, 'r') as f_obj: - data = json.load(f_obj) - acq_time = int(data['SeriesNumber']) - out_dict[acq_time] = file_ - return out_dict + for nifti in niftis: + nifti_meta = nifti.get_metadata() + if nifti_meta['AcquisitionTime'] <= acq_time: + continue + if nifti_meta['AcquisitionTime'] in out_dict \ + and nifti not in out_dict[nifti_meta['AcquisitionTime']]: + out_dict[nifti_meta['AcquisitionTime']].append(nifti) + elif nifti_meta['AcquisitionTime'] not in out_dict: + out_dict[nifti_meta['AcquisitionTime']] = [nifti] + for num in sorted([x for x in out_dict]): + target_entities = [x.get_entities() for x in out_dict[num]] + if target_entities[0]['datatype'] == 'fmap': + if any([all([fmap_entities[x] == i[x] for x in fmap_entities \ + if x != 'run']) for i in target_entities]): + break + else: + continue + if 'acquisition' in fmap_entities \ + and fmap_entities['acquisition'] != target_entities[0]['datatype']: + continue + if 
'session' in target_entities[0]: + intended_for.extend(sorted([op.join('ses-{0}'.format(target_entities[0]['session']), + target_entities[0]['datatype'], + x.filename) for x in out_dict[num]])) + else: + intended_for.extend(sorted([op.join(target_entities[0]['datatype'], + x.filename) for x in out_dict[num]])) + return sorted(intended_for) -def complete_fmap_jsons(bids_dir, subs, ses, overwrite): +def complete_jsons(bids_dir, subs, ses, overwrite): """ Assign 'IntendedFor' field to field maps in BIDS dataset. Uses the most recent field map before each functional or DWI scan, based on @@ -45,205 +69,45 @@ def complete_fmap_jsons(bids_dir, subs, ses, overwrite): ses: string of session overwrite: bool """ - layout = BIDSLayout(bids_dir) - data_suffix = '.nii.gz' - - for sid in subs: - # Remove potential trailing slash with op.abspath - if not sid.startswith('sub-'): - temp_sid = 'sub-{0}'.format(sid) - else: - temp_sid = sid - subj_dir = op.abspath(op.join(bids_dir, temp_sid)) - - for dir_ in ['AP', 'PA']: - for acq in ['func', 'dwi']: - # Get json files for field maps - if ses: - fmap_jsons = layout.get(subject=sid, session=ses, - modality='fmap', extensions='json', - dir=dir_, acq=acq) - else: - fmap_jsons = layout.get(subject=sid, - modality='fmap', extensions='json', - dir=dir_, acq=acq) - - if fmap_jsons: - fmap_dict = _files_to_dict(fmap_jsons) - dts = sorted(fmap_dict.keys()) - intendedfor_dict = {fmap.filename: [] for fmap in - fmap_jsons} - - # Get all scans with associated field maps - if ses: - dat_jsons = layout.get(subject=sid, session=ses, - modality=acq, extensions='json') - else: - dat_jsons = layout.get(subject=sid, - modality=acq, extensions='json') - - dat_jsons = _files_to_dict(dat_jsons) - for dat_file in dat_jsons.keys(): - fn, _ = op.splitext(dat_jsons[dat_file].filename) - fn += data_suffix - fn = fn.split(subj_dir)[-1][1:] # Get relative path - - # Find most immediate field map before scan - idx = bisect.bisect_right(dts, dat_file) - 1 - - # 
if there is no field map *before* the scan, grab the - # first field map - if idx == -1: - idx = 0 - fmap_file = fmap_dict[dts[idx]].filename - intendedfor_dict[fmap_file].append(fn) - - for fmap_file in intendedfor_dict.keys(): - with open(fmap_file, 'r') as f_obj: - data = json.load(f_obj) - - if overwrite or ('IntendedFor' not in data.keys()): - data['IntendedFor'] = intendedfor_dict[fmap_file] - with open(fmap_file, 'w') as f_obj: - json.dump(data, f_obj, sort_keys=True, - indent=4) - - niftis = layout.get(subject=sid, session=ses, modality='fmap', - extensions='nii.gz') - for nifti in niftis: - nifti_fname = nifti.filename - img = nib.load(nifti_fname) - - # get_nearest doesn't work with field maps atm - data = layout.get_metadata(nifti_fname) - json_fname = nifti_fname.replace('.nii.gz', '.json') - - if overwrite or 'TotalReadoutTime' not in data.keys(): - # This next bit taken shamelessly from fmriprep - acc = float(data.get('ParallelReductionFactorInPlane', 1.0)) - pe_idx = {'i': 0, - 'j': 1, - 'k': 2}[data['PhaseEncodingDirection'][0]] - npe = img.shape[pe_idx] - etl = npe // acc - ees = data.get('EffectiveEchoSpacing', None) - if ees is None: - raise Exception('Field "EffectiveEchoSpacing" not ' - 'found in json') - trt = ees * (etl - 1) - data['TotalReadoutTime'] = trt - with open(json_fname, 'w') as f_obj: - json.dump(data, f_obj, sort_keys=True, indent=4) - - -def complete_func_jsons(bids_dir, subs, ses, overwrite): - """ - Calculate TotalReadoutTime and add TaskName - - Parameters - ---------- - bids_dir: path to BIDS dataset - subs: list of subjects - ses: string of session - overwrite: bool - """ - layout = BIDSLayout(bids_dir) - for sid in subs: - # Assign TaskName - for task in layout.get_tasks(): - if ses: - niftis = layout.get(subject=sid, session=ses, modality='func', - task=task, extensions='nii.gz') - else: - niftis = layout.get(subject=sid, modality='func', - task=task, extensions='nii.gz') - - for nifti in niftis: - nifti_fname = 
nifti.filename - img = nib.load(nifti_fname) - # get_nearest doesn't work with field maps atm - data = layout.get_metadata(nifti_fname) - json_fname = nifti_fname.replace('.nii.gz', '.json') - - if overwrite or 'TotalReadoutTime' not in data.keys(): - # This next bit taken shamelessly from fmriprep - acc = float(data.get('ParallelReductionFactorInPlane', - 1.0)) - pe_idx = {'i': 0, - 'j': 1, - 'k': 2}[data['PhaseEncodingDirection'][0]] - npe = img.shape[pe_idx] - etl = npe // acc - ees = data.get('EffectiveEchoSpacing', None) - if ees is None: - raise Exception('Field "EffectiveEchoSpacing" not ' - 'found in json') - trt = ees * (etl - 1) - data['TotalReadoutTime'] = trt - - if overwrite or ('TaskName' not in data.keys()): - data['TaskName'] = task - - if overwrite or ('TaskName' not in data.keys()) or \ - ('TotalReadoutTime' not in data.keys()): - with open(json_fname, 'w') as f_obj: - json.dump(data, f_obj, sort_keys=True, indent=4) - - -def complete_dwi_jsons(bids_dir, subs, ses, overwrite): - """ - Calculate TotalReadoutTime - - Parameters - ---------- - bids_dir: path to BIDS dataset - subs: list of subjects - ses: string of session - overwrite: bool - """ - layout = BIDSLayout(bids_dir) + layout = BIDSLayout(op.abspath(bids_dir), validate=False) for sid in subs: if ses: - niftis = layout.get(subject=sid, session=ses, modality='dwi', - extensions='nii.gz') + niftis = layout.get(subject=sid, session=ses, + extension='nii.gz', + datatype=['func', 'fmap', 'dwi']) else: - niftis = layout.get(subject=sid, modality='dwi', - extensions='nii.gz') - + niftis = layout.get(subject=sid, + extension='nii.gz', + datatype=['func', 'fmap', 'dwi']) for nifti in niftis: - nifti_fname = nifti.filename - img = nib.load(nifti_fname) # get_nearest doesn't work with field maps atm - data = layout.get_metadata(nifti_fname) - json_fname = nifti_fname.replace('.nii.gz', '.json') - - if overwrite or 'TotalReadoutTime' not in data.keys(): + data = nifti.get_metadata() + dump = 0 + 
json_path = nifti.path.replace('.nii.gz', '.json') + if 'EffectiveEchoSpacing' in data.keys() and \ + (overwrite or 'TotalReadoutTime' not in data.keys()): # This next bit taken shamelessly from fmriprep - acc = float(data.get('ParallelReductionFactorInPlane', 1.0)) - pe_idx = {'i': 0, - 'j': 1, - 'k': 2}[data['PhaseEncodingDirection'][0]] - npe = img.shape[pe_idx] - etl = npe // acc + pe_idx = {'i': 0, 'j': 1, 'k': 2}[data['PhaseEncodingDirection'][0]] + etl = nib.load(nifti.path).shape[pe_idx] \ + // float(data.get('ParallelReductionFactorInPlane', 1.0)) ees = data.get('EffectiveEchoSpacing', None) if ees is None: raise Exception('Field "EffectiveEchoSpacing" not ' 'found in json') - trt = ees * (etl - 1) - data['TotalReadoutTime'] = trt - with open(json_fname, 'w') as f_obj: + data['TotalReadoutTime'] = ees * (etl - 1) + dump = 1 + if 'task' in nifti.get_entities() and (overwrite or 'TaskName' not in data.keys()): + data['TaskName'] = nifti.get_entities()['task'] + dump = 1 + if nifti.get_entities()['datatype'] == 'fmap' \ + and (overwrite or 'IntendedFor' not in data.keys()): + data['IntendedFor'] = intended_for_gen(niftis, nifti) + dump = 1 + if dump == 1: + with open(json_path, 'w') as f_obj: json.dump(data, f_obj, sort_keys=True, indent=4) -def run(bids_dir, subs, ses, overwrite): - """ - Complete field maps, functional scans, and DWI scans. - """ - complete_fmap_jsons(bids_dir, subs, ses, overwrite) - complete_func_jsons(bids_dir, subs, ses, overwrite) - complete_dwi_jsons(bids_dir, subs, ses, overwrite) - - def main(args=None): docstr = __doc__ parser = argparse.ArgumentParser(description=docstr) @@ -261,7 +125,7 @@ def main(args=None): args = parser.parse_args(args) if isinstance(args.session, str) and args.session == 'None': args.session = None - run(args.bids_dir, args.subs, args.session, args.overwrite) + complete_jsons(args.bids_dir, args.subs, args.session, args.overwrite) if __name__ == '__main__':