From 1ef39ebf3716c54ed5a82c4bd39ac21be76de4d1 Mon Sep 17 00:00:00 2001 From: frheault Date: Mon, 3 Aug 2020 12:03:14 -0400 Subject: [PATCH 01/40] Added atol every allclose --- scripts/scil_compute_connectivity.py | 4 ++-- scripts/scil_compute_hdf5_average_density_map.py | 4 ++-- scripts/scil_compute_local_tracking.py | 2 +- scripts/scil_compute_pft.py | 2 +- scripts/scil_crop_volume.py | 2 +- scripts/scil_decompose_connectivity.py | 2 +- scripts/scil_run_commit.py | 8 +++++--- 7 files changed, 13 insertions(+), 11 deletions(-) diff --git a/scripts/scil_compute_connectivity.py b/scripts/scil_compute_connectivity.py index 2895ebc97..e0c901218 100755 --- a/scripts/scil_compute_connectivity.py +++ b/scripts/scil_compute_connectivity.py @@ -85,8 +85,8 @@ def _processing_wrapper(args): affine, dimensions, voxel_sizes, _ = get_reference_info(labels_img) measures_to_return = {} - if not (np.allclose(hdf5_file.attrs['affine'], affine) - and np.allclose(hdf5_file.attrs['dimensions'], dimensions)): + if not (np.allclose(hdf5_file.attrs['affine'], affine, atol=1e-03) + and np.array_equal(hdf5_file.attrs['dimensions'], dimensions)): raise ValueError('Provided hdf5 have incompatible headers.') # Precompute to save one transformation, insert later diff --git a/scripts/scil_compute_hdf5_average_density_map.py b/scripts/scil_compute_hdf5_average_density_map.py index de746b5ca..e5127b559 100755 --- a/scripts/scil_compute_hdf5_average_density_map.py +++ b/scripts/scil_compute_hdf5_average_density_map.py @@ -65,8 +65,8 @@ def _average_wrapper(args): for hdf5_filename in hdf5_filenames: hdf5_file = h5py.File(hdf5_filename, 'r') - if not (np.allclose(hdf5_file.attrs['affine'], affine) - and np.allclose(hdf5_file.attrs['dimensions'], dimensions)): + if not (np.allclose(hdf5_file.attrs['affine'], affine, atol=1e-03) + and np.array_equal(hdf5_file.attrs['dimensions'], dimensions)): raise IOError('{} do not have a compatible header'.format( hdf5_filename)) # scil_decompose_connectivity.py saves the streamlines in VOX/CORNER diff --git a/scripts/scil_compute_local_tracking.py b/scripts/scil_compute_local_tracking.py index e4a1fff1c..0d9433639 100755 --- a/scripts/scil_compute_local_tracking.py +++ b/scripts/scil_compute_local_tracking.py @@ -208,7 +208,7 @@ def main(): # will not yield correct results. fodf_sh_img = nib.load(args.in_sh) if not np.allclose(np.mean(fodf_sh_img.header.get_zooms()[:3]), - fodf_sh_img.header.get_zooms()[0], atol=1.e-3): + fodf_sh_img.header.get_zooms()[0], atol=1e-03): parser.error( 'SH file is not isotropic. Tracking cannot be ran robustly.') diff --git a/scripts/scil_compute_pft.py b/scripts/scil_compute_pft.py index 052e684eb..e2e015228 100755 --- a/scripts/scil_compute_pft.py +++ b/scripts/scil_compute_pft.py @@ -198,7 +198,7 @@ def main(): fodf_sh_img = nib.load(args.in_sh) if not np.allclose(np.mean(fodf_sh_img.header.get_zooms()[:3]), - fodf_sh_img.header.get_zooms()[0], atol=1.e-3): + fodf_sh_img.header.get_zooms()[0], atol=1e-03): parser.error( 'SH file is not isotropic. 
Tracking cannot be ran robustly.') diff --git a/scripts/scil_crop_volume.py b/scripts/scil_crop_volume.py index d25775c31..2c6657244 100755 --- a/scripts/scil_crop_volume.py +++ b/scripts/scil_crop_volume.py @@ -117,7 +117,7 @@ def main(): wbbox = pickle.load(bbox_file) if not args.ignore_voxel_size: voxel_size = img.header.get_zooms()[0:3] - if not np.allclose(voxel_size, wbbox.voxel_size[0:3]): + if not np.allclose(voxel_size, wbbox.voxel_size[0:3], atol=1e-03): raise IOError("Bounding box and data voxel sizes are not " "compatible. Use option --ignore_voxel_size " "to ignore this test.") diff --git a/scripts/scil_decompose_connectivity.py b/scripts/scil_decompose_connectivity.py index 7780ff4eb..664be43bc 100755 --- a/scripts/scil_decompose_connectivity.py +++ b/scripts/scil_decompose_connectivity.py @@ -248,7 +248,7 @@ def main(): # Voxel size must be isotropic, for speed/performance considerations vox_sizes = img_labels.header.get_zooms() - if not np.allclose(np.mean(vox_sizes), vox_sizes, atol=0.001): + if not np.allclose(np.mean(vox_sizes), vox_sizes, atol=1e-03): parser.error('Labels must be isotropic') logging.info('*** Loading streamlines ***') diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index 036e2b5fd..aa93a2a87 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -212,8 +212,10 @@ def main(): streamlines = [] len_list = [0] hdf5_file = h5py.File(args.in_tractogram, 'r') - if not (np.allclose(hdf5_file.attrs['affine'], dwi_img.affine) - and np.allclose(hdf5_file.attrs['dimensions'], dwi_img.shape[0:3])): + if not (np.allclose(hdf5_file.attrs['affine'], dwi_img.affine, + atol=1e-03) + and np.array_equal(hdf5_file.attrs['dimensions'], + dwi_img.shape[0:3])): parser.error('{} does not have a compatible header with {}'.format( args.in_tractogram, args.in_dwi)) @@ -271,7 +273,7 @@ def main(): # (based on order of magnitude of signal) img = nib.load(args.in_dwi) data = img.get_fdata(dtype=np.float32) - data[data < (0.001*10**np.floor(np.log10(np.mean(data[data>0]))))] = 0 + data[data < (0.001*10**np.floor(np.log10(np.mean(data[data > 0]))))] = 0 nib.save(nib.Nifti1Image(data, img.affine), os.path.join(tmp_dir.name, 'dwi_zero_fix.nii.gz')) From 25c000d91d204e671da8b9b696003cf8f97ca5c1 Mon Sep 17 00:00:00 2001 From: frheault Date: Mon, 3 Aug 2020 12:04:35 -0400 Subject: [PATCH 02/40] sync gdrive md5sum --- scilpy/io/fetcher.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scilpy/io/fetcher.py b/scilpy/io/fetcher.py index 16f246fcd..ee85c214d 100644 --- a/scilpy/io/fetcher.py +++ b/scilpy/io/fetcher.py @@ -26,7 +26,7 @@ def get_testing_files_dict(): '44eee21bcc0597836ba2eb32d41ed98c'], 'atlas.zip': ['1waYx4ED3qwzyJqrICjjgGXXBW2v4ZCYJ', - '0c1d3da231d1a8b837b5d756c9170b08'], + 'af15ab98bc1faf2554fa0abb1891f345'], 'bst.zip': ['1YprJRnyXk7VRHUkb-bJLs69C1v3tPd1S', 'c0551a28dcefcd7cb53f572b1794b3e8'], @@ -44,7 +44,7 @@ def get_testing_files_dict(): 'dbe796fb75c3e1e5559fad3308982769'], 'others.zip': ['12BAszPjE1A9L2RbQJIFpkPzqUJfPdYO6', - '075deda4532042192c4103df4371ecb4'], + '91117b26d44e7e53d0a00022ed92fb38'], 'processing.zip': ['1caaKoAChyPs5c4WemQWUsR-efD_q2z_b', '57aee810f2f5c687df48de65935bb527'], From 68928d84c11414ca732c56137b49d616610d74bc Mon Sep 17 00:00:00 2001 From: frheault Date: Mon, 3 Aug 2020 13:18:40 -0400 Subject: [PATCH 03/40] Update for suffix and more robust visualisation --- scripts/scil_reorder_connectivity.py | 25 ++++---- scripts/scil_visualize_connectivity.py | 71 +++++++++++----------- 
scripts/tests/test_reorder_connectivity.py | 3 +- 3 files changed, 52 insertions(+), 47 deletions(-) diff --git a/scripts/scil_reorder_connectivity.py b/scripts/scil_reorder_connectivity.py index e76582a26..177be1bf0 100755 --- a/scripts/scil_reorder_connectivity.py +++ b/scripts/scil_reorder_connectivity.py @@ -41,14 +41,13 @@ def _build_arg_parser(): p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=__doc__, epilog=EPILOG) - p.add_argument('in_matrix', nargs='+', + p.add_argument('in_matrices', nargs='+', help='Connectivity matrix or matrices in numpy (.npy) format.') p.add_argument('in_ordering', - help='Txt file with the sub-network as keys and x/y ' - 'lists as value (.txt).') - p.add_argument('out_prefix', - help='Prefix for the output matrix filename.') + help='Txt file with the first row as x and second row as y.') + p.add_argument('--out_suffix', + help='Suffix for the output matrix filename.') p.add_argument('--out_dir', help='Output directory to the re-ordered matrix or matrices.') p.add_argument('--labels_list', @@ -65,18 +64,20 @@ def main(): parser = _build_arg_parser() args = parser.parse_args() - assert_inputs_exist(parser, args.in_matrix, + assert_inputs_exist(parser, args.in_matrices, [args.labels_list, args.in_ordering]) assert_output_dirs_exist_and_empty(parser, args, [], args.out_dir) if args.out_dir is None: args.out_dir = './' + if args.out_suffix is None: + args.out_suffix = "" out_filenames = [] - for filename in args.in_matrix: + for filename in args.in_matrices: basename, _ = os.path.splitext(filename) basename = os.path.basename(basename) out_filenames.append('{}/{}{}.npy'.format(args.out_dir, - args.out_prefix, - basename)) + basename, + args.out_suffix)) assert_outputs_exist(parser, args, out_filenames) with open(args.in_ordering, 'r') as my_file: @@ -84,7 +85,7 @@ def main(): ordering = [[int(val) for val in lines[0].split()], [int(val) for val in lines[1].split()]] - for filename in args.in_matrix: + for filename in args.in_matrices: basename, _ = os.path.splitext(filename) basename = os.path.basename(basename) matrix = load_matrix_in_any_format(filename) @@ -107,8 +108,8 @@ def main(): tmp_matrix = matrix[tuple(indices_1), :] tmp_matrix = tmp_matrix[:, tuple(indices_2)] save_matrix_in_any_format('{}/{}{}.npy'.format(args.out_dir, - args.out_prefix, - basename), + basename, + args.out_suffix), tmp_matrix) diff --git a/scripts/scil_visualize_connectivity.py b/scripts/scil_visualize_connectivity.py index 54f11a270..cd2764063 100755 --- a/scripts/scil_visualize_connectivity.py +++ b/scripts/scil_visualize_connectivity.py @@ -104,6 +104,9 @@ def main(): if not args.show_only: assert_outputs_exist(parser, args, args.out_png, args.histogram) + if args.lookup_table and not args.labels_list: + parser.error('Lookup table axis naming requires --labels_list.') + matrix = load_matrix_in_any_format(args.in_matrix) if args.log: @@ -134,45 +137,45 @@ def main(): if args.labels_list: labels_list = np.loadtxt(args.labels_list, dtype=np.int16).tolist() - if not args.reorder_txt and not args.lookup_table: - if len(labels_list) != matrix.shape[0] \ - or len(labels_list) != matrix.shape[1]: - logging.warning('The provided matrix not the same size as ' - 'the labels list.') - x_legend = labels_list[0:matrix.shape[0]] - y_legend = labels_list[0:matrix.shape[1]] + if args.labels_list and not args.reorder_txt and not args.lookup_table: + if len(labels_list) != matrix.shape[0] \ + or len(labels_list) != matrix.shape[1]: + logging.warning('The 
provided matrix not the same size as ' + 'the labels list.') + x_legend = labels_list[0:matrix.shape[0]] + y_legend = labels_list[0:matrix.shape[1]] + else: + x_legend = x_ticks + y_legend = y_ticks - if args.reorder_txt: - with open(args.reorder_txt, 'r') as my_file: - lines = my_file.readlines() - x_legend = [int(val) for val in lines[0].split()] - y_legend = [int(val) for val in lines[1].split()] + if args.reorder_txt: + with open(args.reorder_txt, 'r') as my_file: + lines = my_file.readlines() + x_legend = [int(val) for val in lines[0].split()] + y_legend = [int(val) for val in lines[1].split()] - if args.lookup_table: + if args.lookup_table: + if args.reorder_txt: logging.warning('Using a lookup table, make sure the reordering ' 'json contain labels, not coordinates') - with open(args.lookup_table) as json_data: - lut = json.load(json_data) - - x_legend = [] - y_legend = [] - if args.reorder_txt: - with open(args.reorder_txt, 'r') as my_file: - lines = my_file.readlines() - x_list = [int(val) for val in lines[0].split()] - y_list = [int(val) for val in lines[1].split()] - else: - x_list = labels_list[0:matrix.shape[0]] - y_list = labels_list[0:matrix.shape[1]] - - x_legend = [lut[str(x)] if str(x) in lut else str(x) - for x in x_list] - y_legend = [lut[str(x)] if str(x) in lut else str(x) - for x in y_list] + with open(args.lookup_table) as json_data: + lut = json.load(json_data) - else: - x_legend = x_ticks - y_legend = y_ticks + x_legend = [] + y_legend = [] + if args.reorder_txt: + with open(args.reorder_txt, 'r') as my_file: + lines = my_file.readlines() + x_list = [int(val) for val in lines[0].split()] + y_list = [int(val) for val in lines[1].split()] + else: + x_list = labels_list[0:matrix.shape[0]] + y_list = labels_list[0:matrix.shape[1]] + + x_legend = [lut[str(x)] if str(x) in lut else str(x) + for x in x_list] + y_legend = [lut[str(x)] if str(x) in lut else str(x) + for x in y_list] if len(x_ticks) != len(x_legend) \ or len(y_ticks) != len(y_legend): diff --git a/scripts/tests/test_reorder_connectivity.py b/scripts/tests/test_reorder_connectivity.py index 694d19801..438fc0d4f 100644 --- a/scripts/tests/test_reorder_connectivity.py +++ b/scripts/tests/test_reorder_connectivity.py @@ -26,5 +26,6 @@ def test_execution_connectivity(script_runner): in_labels_list = os.path.join(get_home(), 'connectivity', 'labels_list.txt') ret = script_runner.run('scil_reorder_connectivity.py', in_sc, in_txt, - 'sc_reo_', '--labels_list', in_labels_list) + '--out_suffix', '_sc_reo', + '--labels_list', in_labels_list) assert ret.success From 383e87a7ede3dc3ee1704aaf6a73c01e9e99daf7 Mon Sep 17 00:00:00 2001 From: frheault Date: Mon, 3 Aug 2020 13:37:24 -0400 Subject: [PATCH 04/40] Update gdrive link --- scilpy/io/fetcher.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scilpy/io/fetcher.py b/scilpy/io/fetcher.py index 16f246fcd..ee85c214d 100644 --- a/scilpy/io/fetcher.py +++ b/scilpy/io/fetcher.py @@ -26,7 +26,7 @@ def get_testing_files_dict(): '44eee21bcc0597836ba2eb32d41ed98c'], 'atlas.zip': ['1waYx4ED3qwzyJqrICjjgGXXBW2v4ZCYJ', - '0c1d3da231d1a8b837b5d756c9170b08'], + 'af15ab98bc1faf2554fa0abb1891f345'], 'bst.zip': ['1YprJRnyXk7VRHUkb-bJLs69C1v3tPd1S', 'c0551a28dcefcd7cb53f572b1794b3e8'], @@ -44,7 +44,7 @@ def get_testing_files_dict(): 'dbe796fb75c3e1e5559fad3308982769'], 'others.zip': ['12BAszPjE1A9L2RbQJIFpkPzqUJfPdYO6', - '075deda4532042192c4103df4371ecb4'], + '91117b26d44e7e53d0a00022ed92fb38'], 'processing.zip': ['1caaKoAChyPs5c4WemQWUsR-efD_q2z_b', 
'57aee810f2f5c687df48de65935bb527'], From 10bc356f251365e82e35f63230890fc92aabca86 Mon Sep 17 00:00:00 2001 From: frheault Date: Tue, 4 Aug 2020 21:21:59 -0400 Subject: [PATCH 05/40] Fix threshold usage for commit and h5 --- scilpy/io/streamlines.py | 7 +++++-- scripts/scil_run_commit.py | 26 +++++++++++++++++++++++--- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/scilpy/io/streamlines.py b/scilpy/io/streamlines.py index e0eddc411..c13779625 100644 --- a/scilpy/io/streamlines.py +++ b/scilpy/io/streamlines.py @@ -5,8 +5,8 @@ import tempfile from dipy.io.streamline import load_tractogram -import h5py import nibabel as nib +from nibabel.streamlines.array_sequence import ArraySequence import numpy as np @@ -219,6 +219,9 @@ def reconstruct_streamlines(data, offsets, lengths, indices=None): List of streamlines. """ + if data.ndim == 2: + data = np.array(data).flatten() + if indices is None: indices = np.arange(len(offsets)) @@ -227,4 +230,4 @@ def reconstruct_streamlines(data, offsets, lengths, indices=None): streamline = data[offsets[i]*3:offsets[i]*3 + lengths[i]*3] streamlines.append(streamline.reshape((lengths[i], 3))) - return streamlines + return ArraySequence(streamlines) diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index 036e2b5fd..a9257972e 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -62,6 +62,7 @@ from nibabel.streamlines import Tractogram from scilpy.io.streamlines import (lazy_streamlines_count, + reconstruct_streamlines, reconstruct_streamlines_from_hdf5) from scilpy.io.utils import (add_overwrite_arg, add_processes_arg, @@ -271,7 +272,7 @@ def main(): # (based on order of magnitude of signal) img = nib.load(args.in_dwi) data = img.get_fdata(dtype=np.float32) - data[data < (0.001*10**np.floor(np.log10(np.mean(data[data>0]))))] = 0 + data[data < (0.001*10**np.floor(np.log10(np.mean(data[data > 0]))))] = 0 nib.save(nib.Nifti1Image(data, img.affine), os.path.join(tmp_dir.name, 'dwi_zero_fix.nii.gz')) @@ -323,7 +324,7 @@ def main(): pk_file = open(os.path.join(commit_results_dir, 'results.pickle'), 'rb') commit_output_dict = pickle.load(pk_file) nbr_streamlines = lazy_streamlines_count(args.in_tractogram) - commit_weights = commit_output_dict[2][:nbr_streamlines] + commit_weights = np.asarray(commit_output_dict[2][:nbr_streamlines]) np.savetxt(os.path.join(commit_results_dir, 'commit_weights.txt'), commit_weights) @@ -340,10 +341,29 @@ def main(): for i, key in enumerate(hdf5_keys): group = hdf5_file[key] tmp_commit_weights = commit_weights[len_list[i]:len_list[i+1]] + if args.threshold_weights is not None: + essential_ind = np.where( + tmp_commit_weights > args.threshold_weights)[0] + tmp_streamlines = reconstruct_streamlines(group['data'], + group['offsets'], + group['lengths'], + indices=essential_ind) + # Replacing the data with the one above the threshold + # Safe since this hdf5 was a copy in the first place + del group['data'] + del group['offsets'] + del group['lengths'] + group['data'] = tmp_streamlines.get_data() + group['offsets'] = tmp_streamlines._offsets + group['lengths'] = tmp_streamlines._lengths + + repeat_commit_weights = np.ones((len(tmp_streamlines._offsets))) * \ + np.average(tmp_commit_weights) + if 'commit_weights' in group: del group['commit_weights'] group.create_dataset('commit_weights', - data=tmp_commit_weights) + data=repeat_commit_weights) files = os.listdir(commit_results_dir) for f in files: From 2eee892b204617477dc8173f73c3cd5018fcbd48 Mon Sep 17 00:00:00 2001 From: frheault Date: 
Tue, 4 Aug 2020 21:48:52 -0400 Subject: [PATCH 06/40] More robust for thresholding input --- scripts/scil_run_commit.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index a9257972e..4fe0fa5ff 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -128,7 +128,7 @@ def _build_arg_parser(): g2.add_argument('--keep_whole_tractogram', action='store_true', help='Save a tractogram copy with streamlines weights in ' 'the data_per_streamline\n[%(default)s].') - g2.add_argument('--threshold_weights', type=float, metavar='THRESHOLD', + g2.add_argument('--threshold_weights', metavar='THRESHOLD', default=0., help='Split the tractogram in two; essential and\n' 'nonessential, based on the provided threshold ' @@ -196,6 +196,13 @@ def main(): parser.error('{} does not have a compatible header with {}'.format( args.in_tractogram, args.in_dwi)) + if args.threshold_weights == 'None': + args.threshold_weights = None + if not args.keep_whole_tractogram and ext != '.h5': + logging.warning('Not thresholding weigth with trk file without ' + 'the --keep_whole_tractogram will not save a ' + 'tractogram') + # COMMIT has some c-level stdout and non-logging print that cannot # be easily stopped. Manual redirection of all printed output if args.verbose: @@ -357,13 +364,13 @@ def main(): group['offsets'] = tmp_streamlines._offsets group['lengths'] = tmp_streamlines._lengths - repeat_commit_weights = np.ones((len(tmp_streamlines._offsets))) * \ + tmp_commit_weights = np.ones((len(tmp_streamlines._offsets))) * \ np.average(tmp_commit_weights) if 'commit_weights' in group: del group['commit_weights'] group.create_dataset('commit_weights', - data=repeat_commit_weights) + data=tmp_commit_weights) files = os.listdir(commit_results_dir) for f in files: From d4f1bf7741df41ae24c318b01467df9c0b3aeb99 Mon Sep 17 00:00:00 2001 From: frheault Date: Tue, 4 Aug 2020 22:14:03 -0400 Subject: [PATCH 07/40] Fix float --- scripts/scil_run_commit.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index 4fe0fa5ff..e33be9083 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -202,6 +202,8 @@ def main(): logging.warning('Not thresholding weigth with trk file without ' 'the --keep_whole_tractogram will not save a ' 'tractogram') + else: + args.threshold_weights = float(args.threshold_weights) # COMMIT has some c-level stdout and non-logging print that cannot # be easily stopped. 
Manual redirection of all printed output From 2410a1ea2e3db0fac41964b2dd7c7153497f504e Mon Sep 17 00:00:00 2001 From: frheault Date: Tue, 4 Aug 2020 22:16:26 -0400 Subject: [PATCH 08/40] Support lowercase --- scripts/scil_run_commit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index e33be9083..5689ad4ee 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -196,7 +196,7 @@ def main(): parser.error('{} does not have a compatible header with {}'.format( args.in_tractogram, args.in_dwi)) - if args.threshold_weights == 'None': + if args.threshold_weights == 'None' or args.threshold_weights == 'none': args.threshold_weights = None if not args.keep_whole_tractogram and ext != '.h5': logging.warning('Not thresholding weigth with trk file without ' From 10bba2cac8104f4642926d4a38ac927483c3086c Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 12 Aug 2020 11:14:40 -0400 Subject: [PATCH 09/40] Robust for empty connection --- scilpy/io/streamlines.py | 14 +++++++++----- scripts/scil_compute_hdf5_average_density_map.py | 4 ++++ scripts/scil_compute_mean_fixel_afd_from_hdf5.py | 2 ++ scripts/scil_save_connections_from_hdf5.py | 15 +++++++++++---- 4 files changed, 26 insertions(+), 9 deletions(-) diff --git a/scilpy/io/streamlines.py b/scilpy/io/streamlines.py index c13779625..a8f0687df 100644 --- a/scilpy/io/streamlines.py +++ b/scilpy/io/streamlines.py @@ -225,9 +225,13 @@ def reconstruct_streamlines(data, offsets, lengths, indices=None): if indices is None: indices = np.arange(len(offsets)) - streamlines = [] - for i in indices: - streamline = data[offsets[i]*3:offsets[i]*3 + lengths[i]*3] - streamlines.append(streamline.reshape((lengths[i], 3))) + streamlines = ArraySequence() + streamlines._data = data.reshape((len(data) // 3, 3)) - return ArraySequence(streamlines) + streamlines._offsets = offsets + streamlines._lengths = lengths + + if indices is None: + return streamlines + else: + return streamlines[indices] diff --git a/scripts/scil_compute_hdf5_average_density_map.py b/scripts/scil_compute_hdf5_average_density_map.py index de746b5ca..d275a8285 100755 --- a/scripts/scil_compute_hdf5_average_density_map.py +++ b/scripts/scil_compute_hdf5_average_density_map.py @@ -70,7 +70,11 @@ def _average_wrapper(args): raise IOError('{} do not have a compatible header'.format( hdf5_filename)) # scil_decompose_connectivity.py saves the streamlines in VOX/CORNER + streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) + if len(streamlines) == 0: + return + density = compute_tract_counts_map(streamlines, dimensions) hdf5_file.close() diff --git a/scripts/scil_compute_mean_fixel_afd_from_hdf5.py b/scripts/scil_compute_mean_fixel_afd_from_hdf5.py index e77a82544..5b6abe1d6 100755 --- a/scripts/scil_compute_mean_fixel_afd_from_hdf5.py +++ b/scripts/scil_compute_mean_fixel_afd_from_hdf5.py @@ -58,6 +58,8 @@ def _afd_rd_wrapper(args): dimensions = in_hdf5_file.attrs['dimensions'] voxel_sizes = in_hdf5_file.attrs['voxel_sizes'] streamlines = reconstruct_streamlines_from_hdf5(in_hdf5_file, key) + if len(streamlines) == 0: + return key, 0, 0 in_hdf5_file.close() header = create_nifti_header(affine, dimensions, voxel_sizes) diff --git a/scripts/scil_save_connections_from_hdf5.py b/scripts/scil_save_connections_from_hdf5.py index cffd52163..d39656f24 100755 --- a/scripts/scil_save_connections_from_hdf5.py +++ b/scripts/scil_save_connections_from_hdf5.py @@ -76,12 +76,18 @@ def main(): else: selected_keys = 
keys + + affine = hdf5_file.attrs['affine'] + dimensions = hdf5_file.attrs['dimensions'] + voxel_sizes = hdf5_file.attrs['voxel_sizes'] + header = create_nifti_header(affine, dimensions, voxel_sizes) for key in selected_keys: - affine = hdf5_file.attrs['affine'] - dimensions = hdf5_file.attrs['dimensions'] - voxel_sizes = hdf5_file.attrs['voxel_sizes'] streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) - header = create_nifti_header(affine, dimensions, voxel_sizes) + if len(streamlines) < 3: + print(streamlines) + print(key, len(streamlines)) + if len(streamlines) == 0: + continue sft = StatefulTractogram(streamlines, header, Space.VOX, origin=Origin.TRACKVIS) if args.include_dps: @@ -91,6 +97,7 @@ def main(): save_tractogram(sft, '{}.trk' .format(os.path.join(args.out_dir, key))) + print() hdf5_file.close() From 7df9e09f46ae500b8471ca7e70c2817bfd08dd8c Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 12 Aug 2020 14:23:21 -0400 Subject: [PATCH 10/40] Revert to reconstruct --- scilpy/io/streamlines.py | 19 ++++++------------- .../scil_compute_hdf5_average_density_map.py | 2 +- 2 files changed, 7 insertions(+), 14 deletions(-) diff --git a/scilpy/io/streamlines.py b/scilpy/io/streamlines.py index a8f0687df..e0eddc411 100644 --- a/scilpy/io/streamlines.py +++ b/scilpy/io/streamlines.py @@ -5,8 +5,8 @@ import tempfile from dipy.io.streamline import load_tractogram +import h5py import nibabel as nib -from nibabel.streamlines.array_sequence import ArraySequence import numpy as np @@ -219,19 +219,12 @@ def reconstruct_streamlines(data, offsets, lengths, indices=None): List of streamlines. """ - if data.ndim == 2: - data = np.array(data).flatten() - if indices is None: indices = np.arange(len(offsets)) - streamlines = ArraySequence() - streamlines._data = data.reshape((len(data) // 3, 3)) - - streamlines._offsets = offsets - streamlines._lengths = lengths + streamlines = [] + for i in indices: + streamline = data[offsets[i]*3:offsets[i]*3 + lengths[i]*3] + streamlines.append(streamline.reshape((lengths[i], 3))) - if indices is None: - return streamlines - else: - return streamlines[indices] + return streamlines diff --git a/scripts/scil_compute_hdf5_average_density_map.py b/scripts/scil_compute_hdf5_average_density_map.py index d275a8285..a6e29cc63 100755 --- a/scripts/scil_compute_hdf5_average_density_map.py +++ b/scripts/scil_compute_hdf5_average_density_map.py @@ -73,7 +73,7 @@ def _average_wrapper(args): streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) if len(streamlines) == 0: - return + continue density = compute_tract_counts_map(streamlines, dimensions) hdf5_file.close() From 1983b470542e6313f2d00f98a663fc3a8a2d2d87 Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 12 Aug 2020 14:34:38 -0400 Subject: [PATCH 11/40] Using arraySequence --- scilpy/io/streamlines.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scilpy/io/streamlines.py b/scilpy/io/streamlines.py index e0eddc411..c13779625 100644 --- a/scilpy/io/streamlines.py +++ b/scilpy/io/streamlines.py @@ -5,8 +5,8 @@ import tempfile from dipy.io.streamline import load_tractogram -import h5py import nibabel as nib +from nibabel.streamlines.array_sequence import ArraySequence import numpy as np @@ -219,6 +219,9 @@ def reconstruct_streamlines(data, offsets, lengths, indices=None): List of streamlines. 
""" + if data.ndim == 2: + data = np.array(data).flatten() + if indices is None: indices = np.arange(len(offsets)) @@ -227,4 +230,4 @@ def reconstruct_streamlines(data, offsets, lengths, indices=None): streamline = data[offsets[i]*3:offsets[i]*3 + lengths[i]*3] streamlines.append(streamline.reshape((lengths[i], 3))) - return streamlines + return ArraySequence(streamlines) From d29da9bf215871012f9706c2258d4d6b5a0a82b3 Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 12 Aug 2020 15:41:32 -0400 Subject: [PATCH 12/40] Cleaning --- scripts/scil_save_connections_from_hdf5.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/scripts/scil_save_connections_from_hdf5.py b/scripts/scil_save_connections_from_hdf5.py index 0aa43f119..8a7ae2b00 100755 --- a/scripts/scil_save_connections_from_hdf5.py +++ b/scripts/scil_save_connections_from_hdf5.py @@ -58,7 +58,6 @@ def _build_arg_parser(): p.add_argument('--save_empty', action='store_true', help='Save empty connections.') - p.add_argument('--labels_list', help='A txt file containing a list ' 'saved by the decomposition script.') @@ -105,10 +104,8 @@ def main(): header = create_nifti_header(affine, dimensions, voxel_sizes) for key in selected_keys: streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) - if len(streamlines) < 3: - print(streamlines) - print(key, len(streamlines)) - if len(streamlines) == 0: + + if len(streamlines) == 0 and not args.save_empty: continue sft = StatefulTractogram(streamlines, header, Space.VOX, origin=Origin.TRACKVIS) @@ -119,7 +116,6 @@ def main(): save_tractogram(sft, '{}.trk' .format(os.path.join(args.out_dir, key))) - print() hdf5_file.close() From da39b8db447ac96f3a9aa3b9e695b93d821d94ff Mon Sep 17 00:00:00 2001 From: frheault Date: Sat, 15 Aug 2020 13:32:39 -0400 Subject: [PATCH 13/40] Manage filesize after commit filtering --- scilpy/tractanalysis/features.py | 1 + scripts/scil_decompose_connectivity.py | 5 +- scripts/scil_run_commit.py | 61 ++++++++++++---------- scripts/scil_save_connections_from_hdf5.py | 2 +- 4 files changed, 37 insertions(+), 32 deletions(-) diff --git a/scilpy/tractanalysis/features.py b/scilpy/tractanalysis/features.py index 94a934139..fb742e0bc 100644 --- a/scilpy/tractanalysis/features.py +++ b/scilpy/tractanalysis/features.py @@ -47,6 +47,7 @@ def remove_loops_and_sharp_turns(streamlines, streamlines_clean.append(s) if use_qb: + ids = [] if len(streamlines_clean) > 1: curvature = [] diff --git a/scripts/scil_decompose_connectivity.py b/scripts/scil_decompose_connectivity.py index 7780ff4eb..034c51203 100755 --- a/scripts/scil_decompose_connectivity.py +++ b/scripts/scil_decompose_connectivity.py @@ -103,8 +103,8 @@ def _save_if_needed(sft, hdf5_file, args, in_label, out_label): if step_type == 'final': group = hdf5_file.create_group('{}_{}'.format(in_label, out_label)) - group.create_dataset('data', data=np.asarray(sft.streamlines.get_data(), - dtype=np.float32)) + group.create_dataset('data', data=sft.streamlines.get_data(), + dtype=np.float32) group.create_dataset('offsets', data=sft.streamlines._offsets, dtype=np.int64) group.create_dataset('lengths', data=sft.streamlines._lengths, @@ -460,6 +460,7 @@ def main(): args.loop_max_angle, use_qb=True, qb_threshold=args.curv_qb_distance) + no_qb_curv = inliers[no_qb_curv_ids] qb_curv_ids = np.setdiff1d(np.arange(len(inliers)), no_qb_curv_ids) diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index 5689ad4ee..e502eb2c9 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py 
@@ -219,8 +219,7 @@ def main(): if ext == '.h5': logging.debug('Reconstructing {} into a tractogram for COMMIT.'.format( args.in_tractogram)) - streamlines = [] - len_list = [0] + hdf5_file = h5py.File(args.in_tractogram, 'r') if not (np.allclose(hdf5_file.attrs['affine'], dwi_img.affine) and np.allclose(hdf5_file.attrs['dimensions'], dwi_img.shape[0:3])): @@ -230,12 +229,15 @@ def main(): # Keep track of the order of connections/streamlines in relation to the # tractogram as well as the number of streamlines for each connection. hdf5_keys = list(hdf5_file.keys()) + streamlines = [] + offsets_list = [0] for key in hdf5_keys: tmp_streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) - len_list.append(len(tmp_streamlines)) + offsets_list.append(len(tmp_streamlines)) streamlines.extend(tmp_streamlines) - len_list = np.cumsum(len_list) + + offsets_list = np.cumsum(offsets_list) sft = StatefulTractogram(streamlines, args.in_dwi, Space.VOX, origin=Origin.TRACKVIS) @@ -243,14 +245,13 @@ def main(): # Keeping the input variable, saving trk file for COMMIT internal use save_tractogram(sft, tmp_tractogram_filename) - initial_hdf5_filename = args.in_tractogram args.in_tractogram = tmp_tractogram_filename + # Writing the scheme file with proper shells tmp_scheme_filename = os.path.join(tmp_dir.name, 'gradients.scheme') tmp_bval_filename = os.path.join(tmp_dir.name, 'bval') bvals, _ = read_bvals_bvecs(args.in_bval, args.in_bvec) - shells_centroids, indices_shells = identify_shells(bvals, - args.b_thr, + shells_centroids, indices_shells = identify_shells(bvals, args.b_thr, roundCentroids=True) np.savetxt(tmp_bval_filename, shells_centroids[indices_shells], newline=' ', fmt='%i') @@ -341,38 +342,40 @@ def main(): if ext == '.h5': new_filename = os.path.join(commit_results_dir, 'decompose_commit.h5') - shutil.copy(initial_hdf5_filename, new_filename) - hdf5_file = h5py.File(new_filename, 'a') - + new_hdf5_file = h5py.File(new_filename, 'w') + new_hdf5_file.attrs['affine'] = sft.affine + new_hdf5_file.attrs['dimensions'] = sft.dimensions + new_hdf5_file.attrs['voxel_sizes'] = sft.voxel_sizes + new_hdf5_file.attrs['voxel_order'] = sft.voxel_order # Assign the weights into the hdf5, while respecting the ordering of # connections/streamlines logging.debug('Adding commit weights to {}.'.format(new_filename)) for i, key in enumerate(hdf5_keys): - group = hdf5_file[key] - tmp_commit_weights = commit_weights[len_list[i]:len_list[i+1]] + new_group = new_hdf5_file.create_group(key) + old_group = hdf5_file[key] + tmp_commit_weights = commit_weights[offsets_list[i] + :offsets_list[i+1]] if args.threshold_weights is not None: essential_ind = np.where( tmp_commit_weights > args.threshold_weights)[0] - tmp_streamlines = reconstruct_streamlines(group['data'], - group['offsets'], - group['lengths'], + tmp_streamlines = reconstruct_streamlines(old_group['data'], + old_group['offsets'], + old_group['lengths'], indices=essential_ind) + # Replacing the data with the one above the threshold # Safe since this hdf5 was a copy in the first place - del group['data'] - del group['offsets'] - del group['lengths'] - group['data'] = tmp_streamlines.get_data() - group['offsets'] = tmp_streamlines._offsets - group['lengths'] = tmp_streamlines._lengths - - tmp_commit_weights = np.ones((len(tmp_streamlines._offsets))) * \ - np.average(tmp_commit_weights) - - if 'commit_weights' in group: - del group['commit_weights'] - group.create_dataset('commit_weights', - data=tmp_commit_weights) + new_group.create_dataset('data', 
data=tmp_streamlines.get_data(), + dtype=np.float32) + new_group.create_dataset('offsets', data=tmp_streamlines._offsets, + dtype=np.int64) + new_group.create_dataset('lengths', data=tmp_streamlines._lengths, + dtype=np.int32) + + for dps_key in hdf5_file[key].keys(): + if dps_key not in ['data', 'offsets', 'lengths']: + new_group.create_dataset(key, data=hdf5_file[key][dps_key]) + new_group.create_dataset('commit_weights', data=tmp_commit_weights) files = os.listdir(commit_results_dir) for f in files: diff --git a/scripts/scil_save_connections_from_hdf5.py b/scripts/scil_save_connections_from_hdf5.py index 8a7ae2b00..a97cf23d6 100755 --- a/scripts/scil_save_connections_from_hdf5.py +++ b/scripts/scil_save_connections_from_hdf5.py @@ -97,7 +97,6 @@ def main(): else: selected_keys = keys - affine = hdf5_file.attrs['affine'] dimensions = hdf5_file.attrs['dimensions'] voxel_sizes = hdf5_file.attrs['voxel_sizes'] @@ -107,6 +106,7 @@ def main(): if len(streamlines) == 0 and not args.save_empty: continue + sft = StatefulTractogram(streamlines, header, Space.VOX, origin=Origin.TRACKVIS) if args.include_dps: From 9d853ed9f55dd4828f21b1f068e84514cec425b0 Mon Sep 17 00:00:00 2001 From: frheault Date: Sat, 15 Aug 2020 15:23:17 -0400 Subject: [PATCH 14/40] Empty in transfo --- scripts/scil_apply_transform_to_hdf5.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/scil_apply_transform_to_hdf5.py b/scripts/scil_apply_transform_to_hdf5.py index 2f1d9ffe3..11979f884 100755 --- a/scripts/scil_apply_transform_to_hdf5.py +++ b/scripts/scil_apply_transform_to_hdf5.py @@ -97,6 +97,9 @@ def main(): voxel_sizes = in_hdf5_file.attrs['voxel_sizes'] streamlines = reconstruct_streamlines_from_hdf5(in_hdf5_file, key) + if len(streamlines) == 0: + continue + header = create_nifti_header(affine, dimensions, voxel_sizes) moving_sft = StatefulTractogram(streamlines, header, Space.VOX, origin=Origin.TRACKVIS) @@ -109,7 +112,8 @@ def main(): new_sft.to_vox() new_sft.to_corner() - affine, dimensions, voxel_sizes, voxel_order = get_reference_info(target_img) + affine, dimensions, voxel_sizes, voxel_order = get_reference_info( + target_img) out_hdf5_file.attrs['affine'] = affine out_hdf5_file.attrs['dimensions'] = dimensions out_hdf5_file.attrs['voxel_sizes'] = voxel_sizes From 1af6f1ebf1703a98216712cc1d6646a2e92cb71a Mon Sep 17 00:00:00 2001 From: frheault Date: Tue, 18 Aug 2020 09:57:50 -0400 Subject: [PATCH 15/40] close commit --- scripts/scil_run_commit.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index e502eb2c9..b70f87b11 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -376,6 +376,7 @@ def main(): if dps_key not in ['data', 'offsets', 'lengths']: new_group.create_dataset(key, data=hdf5_file[key][dps_key]) new_group.create_dataset('commit_weights', data=tmp_commit_weights) + new_hdf5_file.close() files = os.listdir(commit_results_dir) for f in files: From af752716696a00c2a3c03416960bf7f7d8998dde Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 19 Aug 2020 10:30:22 -0400 Subject: [PATCH 16/40] Switch to using with statement for safety --- scripts/scil_apply_transform_to_hdf5.py | 100 +++--- .../scil_compute_hdf5_average_density_map.py | 35 +- .../scil_compute_mean_fixel_afd_from_hdf5.py | 43 ++- scripts/scil_decompose_connectivity.py | 330 +++++++++--------- scripts/scil_run_commit.py | 76 ++-- scripts/scil_save_connections_from_hdf5.py | 81 +++-- 6 files changed, 330 insertions(+), 335 deletions(-) 
diff --git a/scripts/scil_apply_transform_to_hdf5.py b/scripts/scil_apply_transform_to_hdf5.py index 11979f884..be1362ff6 100755 --- a/scripts/scil_apply_transform_to_hdf5.py +++ b/scripts/scil_apply_transform_to_hdf5.py @@ -80,55 +80,57 @@ def main(): if os.path.isfile(args.out_hdf5): os.remove(args.out_hdf5) - in_hdf5_file = h5py.File(args.in_hdf5, 'r') - shutil.copy(args.in_hdf5, args.out_hdf5) - out_hdf5_file = h5py.File(args.out_hdf5, 'a') - transfo = load_matrix_in_any_format(args.in_transfo) - - deformation_data = None - if args.in_deformation is not None: - deformation_data = np.squeeze(nib.load( - args.in_deformation).get_fdata(dtype=np.float32)) - target_img = nib.load(args.in_target_file) - - for key in in_hdf5_file.keys(): - affine = in_hdf5_file.attrs['affine'] - dimensions = in_hdf5_file.attrs['dimensions'] - voxel_sizes = in_hdf5_file.attrs['voxel_sizes'] - streamlines = reconstruct_streamlines_from_hdf5(in_hdf5_file, key) - - if len(streamlines) == 0: - continue - - header = create_nifti_header(affine, dimensions, voxel_sizes) - moving_sft = StatefulTractogram(streamlines, header, Space.VOX, - origin=Origin.TRACKVIS) - - new_sft = transform_warp_streamlines(moving_sft, transfo, target_img, - inverse=args.inverse, - deformation_data=deformation_data, - remove_invalid=not args.cut_invalid, - cut_invalid=args.cut_invalid) - new_sft.to_vox() - new_sft.to_corner() - - affine, dimensions, voxel_sizes, voxel_order = get_reference_info( - target_img) - out_hdf5_file.attrs['affine'] = affine - out_hdf5_file.attrs['dimensions'] = dimensions - out_hdf5_file.attrs['voxel_sizes'] = voxel_sizes - out_hdf5_file.attrs['voxel_order'] = voxel_order - - group = out_hdf5_file[key] - del group['data'] - group.create_dataset('data', data=new_sft.streamlines.get_data()) - del group['offsets'] - group.create_dataset('offsets', data=new_sft.streamlines._offsets) - del group['lengths'] - group.create_dataset('lengths', data=new_sft.streamlines._lengths) - - in_hdf5_file.close() - out_hdf5_file.close() + with h5py.File(args.in_hdf5, 'r') as in_hdf5_file: + shutil.copy(args.in_hdf5, args.out_hdf5) + with h5py.File(args.out_hdf5, 'a') as out_hdf5_file: + transfo = load_matrix_in_any_format(args.in_transfo) + + deformation_data = None + if args.in_deformation is not None: + deformation_data = np.squeeze(nib.load( + args.in_deformation).get_fdata(dtype=np.float32)) + target_img = nib.load(args.in_target_file) + + for key in in_hdf5_file.keys(): + affine = in_hdf5_file.attrs['affine'] + dimensions = in_hdf5_file.attrs['dimensions'] + voxel_sizes = in_hdf5_file.attrs['voxel_sizes'] + streamlines = reconstruct_streamlines_from_hdf5( + in_hdf5_file, key) + + if len(streamlines) == 0: + continue + + header = create_nifti_header(affine, dimensions, voxel_sizes) + moving_sft = StatefulTractogram(streamlines, header, Space.VOX, + origin=Origin.TRACKVIS) + + new_sft = transform_warp_streamlines( + moving_sft, transfo, target_img, + inverse=args.inverse, + deformation_data=deformation_data, + remove_invalid=not args.cut_invalid, + cut_invalid=args.cut_invalid) + new_sft.to_vox() + new_sft.to_corner() + + affine, dimensions, voxel_sizes, voxel_order = get_reference_info( + target_img) + out_hdf5_file.attrs['affine'] = affine + out_hdf5_file.attrs['dimensions'] = dimensions + out_hdf5_file.attrs['voxel_sizes'] = voxel_sizes + out_hdf5_file.attrs['voxel_order'] = voxel_order + + group = out_hdf5_file[key] + del group['data'] + group.create_dataset('data', + data=new_sft.streamlines.get_data()) + del group['offsets'] 
+ group.create_dataset('offsets', + data=new_sft.streamlines._offsets) + del group['lengths'] + group.create_dataset('lengths', + data=new_sft.streamlines._lengths) if __name__ == "__main__": diff --git a/scripts/scil_compute_hdf5_average_density_map.py b/scripts/scil_compute_hdf5_average_density_map.py index a6e29cc63..1841a53fa 100755 --- a/scripts/scil_compute_hdf5_average_density_map.py +++ b/scripts/scil_compute_hdf5_average_density_map.py @@ -58,25 +58,23 @@ def _average_wrapper(args): binary = args[2] out_dir = args[3] - hdf5_file_ref = h5py.File(hdf5_filenames[0], 'r') - affine = hdf5_file_ref.attrs['affine'] - dimensions = hdf5_file_ref.attrs['dimensions'] - density_data = np.zeros(dimensions, dtype=np.float32) + with h5py.File(hdf5_filenames[0], 'r') as hdf5_file_ref: + affine = hdf5_file_ref.attrs['affine'] + dimensions = hdf5_file_ref.attrs['dimensions'] + density_data = np.zeros(dimensions, dtype=np.float32) for hdf5_filename in hdf5_filenames: - hdf5_file = h5py.File(hdf5_filename, 'r') + with h5py.File(hdf5_filename, 'r') as hdf5_file: + if not (np.allclose(hdf5_file.attrs['affine'], affine) + and np.allclose(hdf5_file.attrs['dimensions'], dimensions)): + raise IOError('{} do not have a compatible header'.format( + hdf5_filename)) - if not (np.allclose(hdf5_file.attrs['affine'], affine) - and np.allclose(hdf5_file.attrs['dimensions'], dimensions)): - raise IOError('{} do not have a compatible header'.format( - hdf5_filename)) - # scil_decompose_connectivity.py saves the streamlines in VOX/CORNER + streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) + if len(streamlines) == 0: + continue - streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) - if len(streamlines) == 0: - continue - - density = compute_tract_counts_map(streamlines, dimensions) - hdf5_file.close() + # scil_decompose_connectivity.py saves the streamlines in VOX/CORNER + density = compute_tract_counts_map(streamlines, dimensions) if binary: density_data[density > 0] += 1 @@ -100,9 +98,8 @@ def main(): keys = [] for filename in args.in_hdf5: - curr_file = h5py.File(filename, 'r') - keys.extend(curr_file.keys()) - curr_file.close() + with h5py.File(filename, 'r') as curr_file: + keys.extend(curr_file.keys()) nbr_cpu = validate_nbr_processes(parser, args, args.nbr_processes) if nbr_cpu == 1: diff --git a/scripts/scil_compute_mean_fixel_afd_from_hdf5.py b/scripts/scil_compute_mean_fixel_afd_from_hdf5.py index 5b6abe1d6..5bf203ba7 100755 --- a/scripts/scil_compute_mean_fixel_afd_from_hdf5.py +++ b/scripts/scil_compute_mean_fixel_afd_from_hdf5.py @@ -42,7 +42,8 @@ [1] Raffelt, D., Tournier, JD., Rose, S., Ridgway, GR., Henderson, R., Crozier, S., Salvado, O., & Connelly, A. (2012). Apparent Fibre Density: a novel measure for the analysis of - diffusion-weighted magnetic resonance images. NeuroImage, 59(4), 3976--3994. + diffusion-weighted magnetic resonance images. NeuroImage, + 59(4), 3976--3994. 
""" @@ -53,14 +54,13 @@ def _afd_rd_wrapper(args): sh_basis = args[3] length_weighting = args[4] - in_hdf5_file = h5py.File(in_hdf5_filename, 'r') - affine = in_hdf5_file.attrs['affine'] - dimensions = in_hdf5_file.attrs['dimensions'] - voxel_sizes = in_hdf5_file.attrs['voxel_sizes'] - streamlines = reconstruct_streamlines_from_hdf5(in_hdf5_file, key) - if len(streamlines) == 0: - return key, 0, 0 - in_hdf5_file.close() + with h5py.File(in_hdf5_filename, 'r') as in_hdf5_file: + affine = in_hdf5_file.attrs['affine'] + dimensions = in_hdf5_file.attrs['dimensions'] + voxel_sizes = in_hdf5_file.attrs['voxel_sizes'] + streamlines = reconstruct_streamlines_from_hdf5(in_hdf5_file, key) + if len(streamlines) == 0: + return key, 0, 0 header = create_nifti_header(affine, dimensions, voxel_sizes) sft = StatefulTractogram(streamlines, header, Space.VOX, @@ -109,9 +109,9 @@ def main(): fodf_img = nib.load(args.in_fodf) nbr_cpu = validate_nbr_processes(parser, args, args.nbr_processes) - in_hdf5_file = h5py.File(args.in_hdf5, 'r') - keys = list(in_hdf5_file.keys()) - in_hdf5_file.close() + with h5py.File(args.in_hdf5, 'r') as in_hdf5_file: + keys = list(in_hdf5_file.keys()) + if nbr_cpu == 1: results_list = [] for key in keys: @@ -131,16 +131,15 @@ def main(): pool.join() shutil.copy(args.in_hdf5, args.out_hdf5) - out_hdf5_file = h5py.File(args.out_hdf5, 'a') - for key, afd_fixel, rd_fixel in results_list: - group = out_hdf5_file[key] - if 'afd_fixel' in group: - del group['afd_fixel'] - group.create_dataset('afd_fixel', data=afd_fixel) - if 'rd_fixel' in group: - del group['rd_fixel'] - group.create_dataset('rd_fixel', data=rd_fixel) - out_hdf5_file.close() + with h5py.File(args.out_hdf5, 'a') as out_hdf5_file: + for key, afd_fixel, rd_fixel in results_list: + group = out_hdf5_file[key] + if 'afd_fixel' in group: + del group['afd_fixel'] + group.create_dataset('afd_fixel', data=afd_fixel) + if 'rd_fixel' in group: + del group['rd_fixel'] + group.create_dataset('rd_fixel', data=rd_fixel) if __name__ == '__main__': diff --git a/scripts/scil_decompose_connectivity.py b/scripts/scil_decompose_connectivity.py index 034c51203..532aa628f 100755 --- a/scripts/scil_decompose_connectivity.py +++ b/scripts/scil_decompose_connectivity.py @@ -307,185 +307,185 @@ def main(): comb_list.extend(zip(real_labels, real_labels)) iteration_counter = 0 - hdf5_file = h5py.File(args.out_hdf5, 'w') - affine, dimensions, voxel_sizes, voxel_order = get_reference_info(sft) - hdf5_file.attrs['affine'] = affine - hdf5_file.attrs['dimensions'] = dimensions - hdf5_file.attrs['voxel_sizes'] = voxel_sizes - hdf5_file.attrs['voxel_order'] = voxel_order - - # Each connections is processed independently. 
Multiprocessing would be - # a burden on the I/O of most SSD/HD - for in_label, out_label in comb_list: - if iteration_counter > 0 and iteration_counter % 100 == 0: - logging.info('Split {} nodes out of {}'.format(iteration_counter, - len(comb_list))) - iteration_counter += 1 - - pair_info = [] - if in_label not in con_info: - continue - elif out_label in con_info[in_label]: - pair_info.extend(con_info[in_label][out_label]) - - if out_label not in con_info: - continue - elif in_label in con_info[out_label]: - pair_info.extend(con_info[out_label][in_label]) - - if not len(pair_info): - continue - - connecting_streamlines = [] - connecting_ids = [] - for connection in pair_info: - strl_idx = connection['strl_idx'] - curr_streamlines = compute_streamline_segment( - sft.streamlines[strl_idx], - indices[strl_idx], - connection['in_idx'], - connection['out_idx'], - points_to_idx[strl_idx]) - connecting_streamlines.append(curr_streamlines) - connecting_ids.append(strl_idx) - - # Each step is processed from the previous 'success' - # 1. raw -> length pass/fail - # 2. length pass -> loops pass/fail - # 3. loops pass -> outlier detection pass/fail - # 4. outlier detection pass -> qb curvature pass/fail - # 5. qb curvature pass == final connections - connecting_streamlines = ArraySequence(connecting_streamlines) - - raw_sft = StatefulTractogram.from_sft( - connecting_streamlines, sft, - data_per_streamline=sft.data_per_streamline[connecting_ids], - data_per_point={}) - _save_if_needed(raw_sft, hdf5_file, args, - 'raw', 'raw', in_label, out_label) - - # Doing all post-processing - if not args.no_pruning: - valid_length_ids, invalid_length_ids = _prune_segments( - raw_sft.streamlines, - args.min_length, - args.max_length, - vox_sizes[0]) - - valid_length = connecting_streamlines[valid_length_ids] - invalid_length = connecting_streamlines[invalid_length_ids] - invalid_length_sft = StatefulTractogram.from_sft( - invalid_length, raw_sft, - data_per_streamline=raw_sft.data_per_streamline[invalid_length_ids], + with h5py.File(args.out_hdf5, 'w') as hdf5_file: + affine, dimensions, voxel_sizes, voxel_order = get_reference_info(sft) + hdf5_file.attrs['affine'] = affine + hdf5_file.attrs['dimensions'] = dimensions + hdf5_file.attrs['voxel_sizes'] = voxel_sizes + hdf5_file.attrs['voxel_order'] = voxel_order + + # Each connections is processed independently. Multiprocessing would be + # a burden on the I/O of most SSD/HD + for in_label, out_label in comb_list: + if iteration_counter > 0 and iteration_counter % 100 == 0: + logging.info('Split {} nodes out of {}'.format(iteration_counter, + len(comb_list))) + iteration_counter += 1 + + pair_info = [] + if in_label not in con_info: + continue + elif out_label in con_info[in_label]: + pair_info.extend(con_info[in_label][out_label]) + + if out_label not in con_info: + continue + elif in_label in con_info[out_label]: + pair_info.extend(con_info[out_label][in_label]) + + if not len(pair_info): + continue + + connecting_streamlines = [] + connecting_ids = [] + for connection in pair_info: + strl_idx = connection['strl_idx'] + curr_streamlines = compute_streamline_segment( + sft.streamlines[strl_idx], + indices[strl_idx], + connection['in_idx'], + connection['out_idx'], + points_to_idx[strl_idx]) + connecting_streamlines.append(curr_streamlines) + connecting_ids.append(strl_idx) + + # Each step is processed from the previous 'success' + # 1. raw -> length pass/fail + # 2. length pass -> loops pass/fail + # 3. loops pass -> outlier detection pass/fail + # 4. 
outlier detection pass -> qb curvature pass/fail + # 5. qb curvature pass == final connections + connecting_streamlines = ArraySequence(connecting_streamlines) + + raw_sft = StatefulTractogram.from_sft( + connecting_streamlines, sft, + data_per_streamline=sft.data_per_streamline[connecting_ids], + data_per_point={}) + _save_if_needed(raw_sft, hdf5_file, args, + 'raw', 'raw', in_label, out_label) + + # Doing all post-processing + if not args.no_pruning: + valid_length_ids, invalid_length_ids = _prune_segments( + raw_sft.streamlines, + args.min_length, + args.max_length, + vox_sizes[0]) + + valid_length = connecting_streamlines[valid_length_ids] + invalid_length = connecting_streamlines[invalid_length_ids] + invalid_length_sft = StatefulTractogram.from_sft( + invalid_length, raw_sft, + data_per_streamline=raw_sft.data_per_streamline[invalid_length_ids], + data_per_point={}) + + _save_if_needed(invalid_length_sft, hdf5_file, args, + 'discarded', 'invalid_length', + in_label, out_label) + else: + valid_length = connecting_streamlines + valid_length_ids = range(len(connecting_streamlines)) + + if not len(valid_length): + continue + + valid_length_sft = StatefulTractogram.from_sft( + valid_length, raw_sft, + data_per_streamline=raw_sft.data_per_streamline[valid_length_ids], data_per_point={}) - _save_if_needed(invalid_length_sft, hdf5_file, args, - 'discarded', 'invalid_length', - in_label, out_label) - else: - valid_length = connecting_streamlines - valid_length_ids = range(len(connecting_streamlines)) - - if not len(valid_length): - continue - - valid_length_sft = StatefulTractogram.from_sft( - valid_length, raw_sft, - data_per_streamline=raw_sft.data_per_streamline[valid_length_ids], - data_per_point={}) - - _save_if_needed(valid_length_sft, hdf5_file, args, - 'intermediate', 'valid_length', in_label, out_label) - - if not args.no_remove_loops: - no_loop_ids = remove_loops_and_sharp_turns(valid_length, - args.loop_max_angle) - no_loops = valid_length[no_loop_ids] - loop_ids = np.setdiff1d(np.arange(len(valid_length)), no_loop_ids) - loops = valid_length[loop_ids] - - loops_sft = StatefulTractogram.from_sft( - loops, valid_length_sft, - data_per_streamline=valid_length_sft.data_per_streamline[loop_ids], + _save_if_needed(valid_length_sft, hdf5_file, args, + 'intermediate', 'valid_length', in_label, out_label) + + if not args.no_remove_loops: + no_loop_ids = remove_loops_and_sharp_turns(valid_length, + args.loop_max_angle) + no_loops = valid_length[no_loop_ids] + loop_ids = np.setdiff1d( + np.arange(len(valid_length)), no_loop_ids) + loops = valid_length[loop_ids] + + loops_sft = StatefulTractogram.from_sft( + loops, valid_length_sft, + data_per_streamline=valid_length_sft.data_per_streamline[loop_ids], + data_per_point={}) + + _save_if_needed(loops_sft, hdf5_file, args, + 'discarded', 'loops', in_label, out_label) + else: + no_loops = valid_length + no_loop_ids = range(len(valid_length)) + + if not len(no_loops): + continue + + no_loops_sft = StatefulTractogram.from_sft( + no_loops, valid_length_sft, + data_per_streamline=valid_length_sft.data_per_streamline[no_loop_ids], data_per_point={}) - _save_if_needed(loops_sft, hdf5_file, args, - 'discarded', 'loops', in_label, out_label) - else: - no_loops = valid_length - no_loop_ids = range(len(valid_length)) + _save_if_needed(no_loops_sft, hdf5_file, args, + 'intermediate', 'no_loops', in_label, out_label) - if not len(no_loops): - continue + if not args.no_remove_outliers: + outliers_ids, inliers_ids = remove_outliers(no_loops, + 
args.outlier_threshold) + outliers = no_loops[outliers_ids] + inliers = no_loops[inliers_ids] - no_loops_sft = StatefulTractogram.from_sft( - no_loops, valid_length_sft, - data_per_streamline=valid_length_sft.data_per_streamline[no_loop_ids], - data_per_point={}) + outliers_sft = StatefulTractogram.from_sft( + outliers, no_loops_sft, + data_per_streamline=no_loops_sft.data_per_streamline[outliers_ids], + data_per_point={}) - _save_if_needed(no_loops_sft, hdf5_file, args, - 'intermediate', 'no_loops', in_label, out_label) + _save_if_needed(outliers_sft, hdf5_file, args, + 'discarded', 'outliers', in_label, out_label) + else: + inliers = no_loops + inliers_ids = range(len(no_loops)) - if not args.no_remove_outliers: - outliers_ids, inliers_ids = remove_outliers(no_loops, - args.outlier_threshold) - outliers = no_loops[outliers_ids] - inliers = no_loops[inliers_ids] + if not len(inliers): + continue - outliers_sft = StatefulTractogram.from_sft( - outliers, no_loops_sft, - data_per_streamline=no_loops_sft.data_per_streamline[outliers_ids], + inliers_sft = StatefulTractogram.from_sft( + inliers, no_loops_sft, + data_per_streamline=no_loops_sft.data_per_streamline[inliers_ids], data_per_point={}) - _save_if_needed(outliers_sft, hdf5_file, args, - 'discarded', 'outliers', in_label, out_label) - else: - inliers = no_loops - inliers_ids = range(len(no_loops)) - - if not len(inliers): - continue - - inliers_sft = StatefulTractogram.from_sft( - inliers, no_loops_sft, - data_per_streamline=no_loops_sft.data_per_streamline[inliers_ids], - data_per_point={}) - - _save_if_needed(inliers_sft, hdf5_file, args, - 'intermediate', 'inliers', in_label, out_label) - - if not args.no_remove_curv_dev: - no_qb_curv_ids = remove_loops_and_sharp_turns( - inliers, - args.loop_max_angle, - use_qb=True, - qb_threshold=args.curv_qb_distance) - - no_qb_curv = inliers[no_qb_curv_ids] - qb_curv_ids = np.setdiff1d(np.arange(len(inliers)), - no_qb_curv_ids) - qb_curv = inliers[qb_curv_ids] - - qb_curv_sft = StatefulTractogram.from_sft( - qb_curv, inliers_sft, - data_per_streamline=inliers_sft.data_per_streamline[qb_curv_ids], + _save_if_needed(inliers_sft, hdf5_file, args, + 'intermediate', 'inliers', in_label, out_label) + + if not args.no_remove_curv_dev: + no_qb_curv_ids = remove_loops_and_sharp_turns( + inliers, + args.loop_max_angle, + use_qb=True, + qb_threshold=args.curv_qb_distance) + + no_qb_curv = inliers[no_qb_curv_ids] + qb_curv_ids = np.setdiff1d(np.arange(len(inliers)), + no_qb_curv_ids) + qb_curv = inliers[qb_curv_ids] + + qb_curv_sft = StatefulTractogram.from_sft( + qb_curv, inliers_sft, + data_per_streamline=inliers_sft.data_per_streamline[qb_curv_ids], + data_per_point={}) + + _save_if_needed(qb_curv_sft, hdf5_file, args, + 'discarded', 'qb_curv', in_label, out_label) + else: + no_qb_curv = inliers + no_qb_curv_ids = range(len(inliers)) + + no_qb_curv_sft = StatefulTractogram.from_sft( + no_qb_curv, inliers_sft, + data_per_streamline=inliers_sft.data_per_streamline[no_qb_curv_ids], data_per_point={}) - _save_if_needed(qb_curv_sft, hdf5_file, args, - 'discarded', 'qb_curv', in_label, out_label) - else: - no_qb_curv = inliers - no_qb_curv_ids = range(len(inliers)) - - no_qb_curv_sft = StatefulTractogram.from_sft( - no_qb_curv, inliers_sft, - data_per_streamline=inliers_sft.data_per_streamline[no_qb_curv_ids], - data_per_point={}) - - _save_if_needed(no_qb_curv_sft, hdf5_file, args, - 'final', 'final', in_label, out_label) + _save_if_needed(no_qb_curv_sft, hdf5_file, args, + 'final', 'final', in_label, 
out_label) - hdf5_file.close() time2 = time.time() logging.info( ' Connections post-processing and saving took {} sec.'.format( diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index b70f87b11..0f056c985 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -342,41 +342,44 @@ def main(): if ext == '.h5': new_filename = os.path.join(commit_results_dir, 'decompose_commit.h5') - new_hdf5_file = h5py.File(new_filename, 'w') - new_hdf5_file.attrs['affine'] = sft.affine - new_hdf5_file.attrs['dimensions'] = sft.dimensions - new_hdf5_file.attrs['voxel_sizes'] = sft.voxel_sizes - new_hdf5_file.attrs['voxel_order'] = sft.voxel_order - # Assign the weights into the hdf5, while respecting the ordering of - # connections/streamlines - logging.debug('Adding commit weights to {}.'.format(new_filename)) - for i, key in enumerate(hdf5_keys): - new_group = new_hdf5_file.create_group(key) - old_group = hdf5_file[key] - tmp_commit_weights = commit_weights[offsets_list[i] - :offsets_list[i+1]] - if args.threshold_weights is not None: - essential_ind = np.where( - tmp_commit_weights > args.threshold_weights)[0] - tmp_streamlines = reconstruct_streamlines(old_group['data'], - old_group['offsets'], - old_group['lengths'], - indices=essential_ind) - - # Replacing the data with the one above the threshold - # Safe since this hdf5 was a copy in the first place - new_group.create_dataset('data', data=tmp_streamlines.get_data(), - dtype=np.float32) - new_group.create_dataset('offsets', data=tmp_streamlines._offsets, - dtype=np.int64) - new_group.create_dataset('lengths', data=tmp_streamlines._lengths, - dtype=np.int32) - - for dps_key in hdf5_file[key].keys(): - if dps_key not in ['data', 'offsets', 'lengths']: - new_group.create_dataset(key, data=hdf5_file[key][dps_key]) - new_group.create_dataset('commit_weights', data=tmp_commit_weights) - new_hdf5_file.close() + with h5py.File(new_filename, 'w') as new_hdf5_file: + new_hdf5_file.attrs['affine'] = sft.affine + new_hdf5_file.attrs['dimensions'] = sft.dimensions + new_hdf5_file.attrs['voxel_sizes'] = sft.voxel_sizes + new_hdf5_file.attrs['voxel_order'] = sft.voxel_order + # Assign the weights into the hdf5, while respecting the ordering of + # connections/streamlines + logging.debug('Adding commit weights to {}.'.format(new_filename)) + for i, key in enumerate(hdf5_keys): + new_group = new_hdf5_file.create_group(key) + old_group = hdf5_file[key] + tmp_commit_weights = commit_weights[offsets_list[i]:offsets_list[i+1]] + if args.threshold_weights is not None: + essential_ind = np.where( + tmp_commit_weights > args.threshold_weights)[0] + tmp_streamlines = reconstruct_streamlines(old_group['data'], + old_group['offsets'], + old_group['lengths'], + indices=essential_ind) + + # Replacing the data with the one above the threshold + # Safe since this hdf5 was a copy in the first place + new_group.create_dataset('data', + data=tmp_streamlines.get_data(), + dtype=np.float32) + new_group.create_dataset('offsets', + data=tmp_streamlines._offsets, + dtype=np.int64) + new_group.create_dataset('lengths', + data=tmp_streamlines._lengths, + dtype=np.int32) + + for dps_key in hdf5_file[key].keys(): + if dps_key not in ['data', 'offsets', 'lengths']: + new_group.create_dataset(key, + data=hdf5_file[key][dps_key]) + new_group.create_dataset('commit_weights', + data=tmp_commit_weights) files = os.listdir(commit_results_dir) for f in files: @@ -433,9 +436,6 @@ def main(): output_filename)) nib.streamlines.save(tractogram_file, output_filename) - 
# Cleanup the temporary directory - if ext == '.h5': - hdf5_file.close() tmp_dir.cleanup() diff --git a/scripts/scil_save_connections_from_hdf5.py b/scripts/scil_save_connections_from_hdf5.py index a97cf23d6..52c2fb106 100755 --- a/scripts/scil_save_connections_from_hdf5.py +++ b/scripts/scil_save_connections_from_hdf5.py @@ -76,48 +76,45 @@ def main(): if args.save_empty and args.labels_list is None: parser.error("The option --save_empty requires --labels_list.") - hdf5_file = h5py.File(args.in_hdf5, 'r') - - if args.save_empty: - all_labels = np.loadtxt(args.labels_list, dtype='str') - comb_list = list(itertools.combinations(all_labels, r=2)) - comb_list.extend(zip(all_labels, all_labels)) - keys = [i[0]+'_'+i[1] for i in comb_list] - else: - keys = hdf5_file.keys() - - if args.edge_keys is not None: - selected_keys = [key for key in keys if key in args.edge_keys] - elif args.node_keys is not None: - selected_keys = [] - for node in args.node_keys: - selected_keys.extend([key for key in keys - if key.startswith(node + '_') - or key.endswith('_' + node)]) - else: - selected_keys = keys - - affine = hdf5_file.attrs['affine'] - dimensions = hdf5_file.attrs['dimensions'] - voxel_sizes = hdf5_file.attrs['voxel_sizes'] - header = create_nifti_header(affine, dimensions, voxel_sizes) - for key in selected_keys: - streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) - - if len(streamlines) == 0 and not args.save_empty: - continue - - sft = StatefulTractogram(streamlines, header, Space.VOX, - origin=Origin.TRACKVIS) - if args.include_dps: - for dps_key in hdf5_file[key].keys(): - if dps_key not in ['data', 'offsets', 'lengths']: - sft.data_per_streamline[dps_key] = hdf5_file[key][dps_key] - - save_tractogram(sft, '{}.trk' - .format(os.path.join(args.out_dir, key))) - - hdf5_file.close() + with h5py.File(args.in_hdf5, 'r') as hdf5_file: + if args.save_empty: + all_labels = np.loadtxt(args.labels_list, dtype='str') + comb_list = list(itertools.combinations(all_labels, r=2)) + comb_list.extend(zip(all_labels, all_labels)) + keys = [i[0]+'_'+i[1] for i in comb_list] + else: + keys = hdf5_file.keys() + + if args.edge_keys is not None: + selected_keys = [key for key in keys if key in args.edge_keys] + elif args.node_keys is not None: + selected_keys = [] + for node in args.node_keys: + selected_keys.extend([key for key in keys + if key.startswith(node + '_') + or key.endswith('_' + node)]) + else: + selected_keys = keys + + affine = hdf5_file.attrs['affine'] + dimensions = hdf5_file.attrs['dimensions'] + voxel_sizes = hdf5_file.attrs['voxel_sizes'] + header = create_nifti_header(affine, dimensions, voxel_sizes) + for key in selected_keys: + streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) + + if len(streamlines) == 0 and not args.save_empty: + continue + + sft = StatefulTractogram(streamlines, header, Space.VOX, + origin=Origin.TRACKVIS) + if args.include_dps: + for dps_key in hdf5_file[key].keys(): + if dps_key not in ['data', 'offsets', 'lengths']: + sft.data_per_streamline[dps_key] = hdf5_file[key][dps_key] + + save_tractogram(sft, '{}.trk' + .format(os.path.join(args.out_dir, key))) if __name__ == "__main__": From d100260261249bba3d50ff73f380b411c6109dd0 Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 19 Aug 2020 11:03:48 -0400 Subject: [PATCH 17/40] Check for mean_fixel --- scripts/scil_compute_mean_fixel_afd_from_hdf5.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/scripts/scil_compute_mean_fixel_afd_from_hdf5.py 
b/scripts/scil_compute_mean_fixel_afd_from_hdf5.py index e77a82544..d5a5aa400 100755 --- a/scripts/scil_compute_mean_fixel_afd_from_hdf5.py +++ b/scripts/scil_compute_mean_fixel_afd_from_hdf5.py @@ -100,14 +100,21 @@ def main(): assert_inputs_exist(parser, [args.in_hdf5, args.in_fodf]) assert_outputs_exist(parser, args, [args.out_hdf5]) + nbr_cpu = validate_nbr_processes(parser, args, args.nbr_processes) + # HDF5 will not overwrite the file if os.path.isfile(args.out_hdf5): os.remove(args.out_hdf5) fodf_img = nib.load(args.in_fodf) - - nbr_cpu = validate_nbr_processes(parser, args, args.nbr_processes) in_hdf5_file = h5py.File(args.in_hdf5, 'r') + if not (np.allclose(in_hdf5_file.attrs['affine'], fodf_img.affine, + atol=1e-03) + and np.array_equal(in_hdf5_file.attrs['dimensions'], + fodf_img.shape[0:3])): + parser.error('{} does not have a compatible header with {}'.format( + args.in_hdf5, args.in_fodf)) + keys = list(in_hdf5_file.keys()) in_hdf5_file.close() if nbr_cpu == 1: From 3a1af135c308fda73d20ed9e157f1737e7ca10a2 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Wed, 19 Aug 2020 12:36:25 -0400 Subject: [PATCH 18/40] add the possibility to add options to eddy or topup --- scilpy/io/utils.py | 43 +++++++++++++++++++++++++++ scripts/scil_prepare_eddy_command.py | 8 +++++ scripts/scil_prepare_topup_command.py | 16 ++++++++-- 3 files changed, 64 insertions(+), 3 deletions(-) diff --git a/scilpy/io/utils.py b/scilpy/io/utils.py index 5c1a3e780..73a5ba023 100644 --- a/scilpy/io/utils.py +++ b/scilpy/io/utils.py @@ -11,6 +11,17 @@ from scilpy.utils.bvec_bval_tools import DEFAULT_B0_THRESHOLD +eddy_options = ["mb", "mb_offs", "slspec", "mporder", "s2v_lambda", "field", + "field_mat", "flm", "slm", "fwhm", "niter", "s2v_niter", + "cnr_maps", "residuals", "fep", "interp", "s2v_interp", + "resamp", "nvoxhp", "ff", "ol_nstd", "ol_nvox", "ol_type", + "ol_pos", "ol_sqr", "dont_sep_offs_move", "dont_peas"] + +topup_options = ['out', 'fout', 'iout', 'logout', 'warpres', 'subsamp', 'fwhm', + 'config', 'miter', 'lambda', 'ssqlambda', 'regmod', 'estmov', + "minmet", 'splineorder', 'numprec', 'interp', 'scale', + 'regrid'] + def link_bundles_and_reference(parser, args, input_tractogram_list): """ @@ -430,3 +441,35 @@ def save_matrix_in_any_format(filepath, output_data): np.save('{}.npy'.format(filepath), output_data) else: raise ValueError('Extension {} is not supported'.format(ext)) + + +def assert_fsl_options_exist(parser, options_args, command='topup'): + """ + Assert that all options for topup or eddy exist. + If not, print parser's usage and exit. + + Parameters + ---------- + parser: argparse.ArgumentParser object + Parser. + options_args: string + Options for fsl command + command: string + Command used (eddy or topup). 
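+
+    Examples
+    --------
+    Illustrative sketch only; `parser` is any argparse.ArgumentParser and
+    the option values are hypothetical:
+
+    >>> assert_fsl_options_exist(parser, '--mb 4 --cnr_maps', 'eddy')
+    >>> # both names are in the eddy whitelist above: passes silently
+    >>> assert_fsl_options_exist(parser, '--foo 1', 'eddy')
+    >>> # 'foo' is not a known eddy option: calls parser.error()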
+ """ + if command == 'eddy': + fsl_options = eddy_options + elif command == 'topup': + fsl_options = topup_options + else: + parser.error('{} command is not supported as fsl ' + 'command.'.format(command)) + + options = options_args.split() + res = [i for i in options if "--" in i] + res = list(map(lambda x: x.replace('--', ''), res)) + + for nOption in res: + if nOption not in fsl_options: + parser.error('--{} is not a valid option for ' + '{} command.'.format(nOption, command)) diff --git a/scripts/scil_prepare_eddy_command.py b/scripts/scil_prepare_eddy_command.py index 7b4ebe765..0f9d1453b 100755 --- a/scripts/scil_prepare_eddy_command.py +++ b/scripts/scil_prepare_eddy_command.py @@ -12,6 +12,7 @@ from dipy.io.gradients import read_bvals_bvecs import numpy as np from scilpy.io.utils import (add_overwrite_arg, add_verbose_arg, + assert_fsl_options_exist, assert_inputs_exist) from scilpy.preprocessing.distortion_correction import (create_acqparams, create_index, @@ -77,6 +78,9 @@ def _build_arg_parser(): help='if set, will use the fixed seed strategy for eddy.\n' 'Enhances reproducibility.') + p.add_argument('--eddy_options', default=None, + help='Additional options you want to use to run eddy.') + add_overwrite_arg(p) add_verbose_arg(p) @@ -99,6 +103,7 @@ def main(): required_args = [args.in_dwi, args.in_bvals, args.in_bvecs, args.in_mask] assert_inputs_exist(parser, required_args) + assert_fsl_options_exist(parser, args.eddy_options, 'eddy') if os.path.splitext(args.out_prefix)[1] != '': parser.error('The prefix must not contain any extension.') @@ -137,6 +142,9 @@ def main(): if args.fix_seed: additional_args += "--initrand " + if args.eddy_options: + additional_args += args.eddy_options + output_path = os.path.join(args.out_directory, args.out_prefix) eddy = '{0} --imain={1} --mask={2} --acqp={3} --index={4}' \ ' --bvecs={5} --bvals={6} --out={7} --data_is_shelled {8}' \ diff --git a/scripts/scil_prepare_topup_command.py b/scripts/scil_prepare_topup_command.py index fb06c2111..056d08797 100755 --- a/scripts/scil_prepare_topup_command.py +++ b/scripts/scil_prepare_topup_command.py @@ -15,7 +15,8 @@ import nibabel as nib import numpy as np from scilpy.io.utils import (add_overwrite_arg, add_verbose_arg, - assert_inputs_exist, assert_outputs_exist) + assert_inputs_exist, assert_outputs_exist, + assert_fsl_options_exist) from scilpy.preprocessing.distortion_correction import create_acqparams @@ -66,6 +67,9 @@ def _build_arg_parser(): 'else, will output the lines to the ' + 'terminal [%(default)s].') + p.add_argument('--topup_options', default=None, + help='Additional options you want to use to run topup.') + add_overwrite_arg(p) add_verbose_arg(p) @@ -84,6 +88,7 @@ def main(): assert_inputs_exist(parser, required_args) assert_outputs_exist(parser, args, [], args.out_b0s) + assert_fsl_options_exist(parser, args.topup_options) if os.path.splitext(args.out_prefix)[1] != '': parser.error('The prefix must not contain any extension.') @@ -139,11 +144,16 @@ def main(): output_path = os.path.join(args.out_directory, args.out_prefix) fout_path = os.path.join(args.out_directory, "correction_field") iout_path = os.path.join(args.out_directory, "corrected_b0s") + + additional_args = "" + if args.topup_options: + additional_args = args.topup_options + topup = 'topup --imain={0} --datain={1}'\ ' --config={2} --verbose --out={3}'\ - ' --fout={4} --iout={5} --subsamp=1 \n'\ + ' --fout={4} --iout={5} --subsamp=1 {6}\n'\ .format(fused_b0s_path, acqparams_path, args.config, output_path, - fout_path, 
iout_path) + fout_path, iout_path, additional_args) if args.out_script: with open("topup.sh", 'w') as f: From 38f4b74163a72c49a4719c947ab2d328633cbe74 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Wed, 19 Aug 2020 12:37:52 -0400 Subject: [PATCH 19/40] remove default value command --- scilpy/io/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scilpy/io/utils.py b/scilpy/io/utils.py index 73a5ba023..41b783262 100644 --- a/scilpy/io/utils.py +++ b/scilpy/io/utils.py @@ -443,7 +443,7 @@ def save_matrix_in_any_format(filepath, output_data): raise ValueError('Extension {} is not supported'.format(ext)) -def assert_fsl_options_exist(parser, options_args, command='topup'): +def assert_fsl_options_exist(parser, options_args, command): """ Assert that all options for topup or eddy exist. If not, print parser's usage and exit. From b48899a3a2def9e648b4453dde4fede957fcdfc1 Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 19 Aug 2020 14:55:34 -0400 Subject: [PATCH 20/40] Memory safe operation --- scilpy/image/operations.py | 315 +++++++++++++++---------- scripts/scil_image_math.py | 49 ++-- scripts/scil_split_tractograms.py | 27 ++- scripts/scil_split_volume_by_ids.py | 12 +- scripts/scil_split_volume_by_labels.py | 25 +- scripts/tests/test_image_math.py | 2 +- 6 files changed, 254 insertions(+), 176 deletions(-) diff --git a/scilpy/image/operations.py b/scilpy/image/operations.py index 316aab961..30c89e270 100644 --- a/scilpy/image/operations.py +++ b/scilpy/image/operations.py @@ -10,6 +10,7 @@ from copy import copy import logging +import nibabel as nib import numpy as np from scipy.ndimage.filters import gaussian_filter from scipy.ndimage.morphology import (binary_closing, binary_dilation, @@ -54,7 +55,7 @@ def get_image_ops(): """Get a dictionary of all functions relating to image operations""" image_ops = get_array_ops() image_ops.update(OrderedDict([ - ('concat', concat), + ('concatenate', concat), ('dilation', dilation), ('erosion', erosion), ('closing', closing), @@ -73,12 +74,13 @@ def get_operations_doc(ops: dict): return "".join(full_doc) -def _validate_arrays(*arrays): - """Make sure that all inputs are arrays, and that their shapes match.""" - ref_array = arrays[0] - for array in arrays: - if isinstance(array, np.ndarray) and \ - not np.all(ref_array.shape == array.shape): +def _validate_imgs(*imgs): + """Make sure that all inputs are images, and that their shapes match.""" + ref_img = imgs[-1] + for img in imgs: + if isinstance(img, nib.Nifti1Image) and \ + not np.all(ref_img.header.get_data_shape() == + img.header.get_data_shape()): raise ValueError('Not all inputs have the same shape!') @@ -97,8 +99,8 @@ def _validate_length(input_list, length, at_least=False): raise ValueError -def _validate_dtype(x, dtype): - """Make sure that the input has the right datatype.""" +def _validate_type(x, dtype): + """Make sure that the input has the right type.""" if not isinstance(x, dtype): logging.error( 'The input must be of type {} for this operation.'.format(dtype)) @@ -112,24 +114,25 @@ def _validate_float(x): raise ValueError -def lower_threshold(input_list): +def lower_threshold(input_list, ref_img): """ lower_threshold: IMG THRESHOLD All values below the threshold will be set to zero. All values above the threshold will be set to one. 
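
     Example (illustrative sketch; img is any loaded nib.Nifti1Image):
     lower_threshold([img, 0.5], img) returns an array with 1 where
     img >= 0.5 and 0 elsewhere.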
""" _validate_length(input_list, 2) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - output_data = copy(input_list[0]) - output_data[input_list[0] < input_list[1]] = 0 - output_data[input_list[0] >= input_list[1]] = 1 + output_data = np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) + data = input_list[0].get_fdata(dtype=np.float64) + output_data[data < input_list[1]] = 0 + output_data[data >= input_list[1]] = 1 return output_data -def upper_threshold(input_list): +def upper_threshold(input_list, ref_img): """ upper_threshold: IMG THRESHOLD All values below the threshold will be set to one. @@ -137,228 +140,264 @@ def upper_threshold(input_list): Equivalent to lower_threshold followed by an inversion. """ _validate_length(input_list, 2) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - output_data = copy(input_list[0]) - output_data[input_list[0] <= input_list[1]] = 1 - output_data[input_list[0] > input_list[1]] = 0 + output_data = np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) + data = input_list[0].get_fdata(dtype=np.float64) + output_data[data <= input_list[1]] = 1 + output_data[data > input_list[1]] = 0 return output_data -def lower_clip(input_list): +def lower_clip(input_list, ref_img): """ lower_clip: IMG THRESHOLD All values below the threshold will be set to threshold. """ _validate_length(input_list, 2) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - return np.clip(input_list[0], input_list[1], None) + return np.clip(input_list[0].get_fdata(dtype=np.float64), + input_list[1], None) -def upper_clip(input_list): +def upper_clip(input_list, ref_img): """ upper_clip: IMG THRESHOLD All values above the threshold will be set to threshold. """ _validate_length(input_list, 2) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - return np.clip(input_list[0], None, input_list[1]) + return np.clip(input_list[0].get_fdata(dtype=np.float64), + None, input_list[1]) -def absolute_value(input_list): +def absolute_value(input_list, ref_img): """ absolute_value: IMG All negative values will become positive. """ _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - return np.abs(input_list[0]) + return np.abs(input_list[0].get_fdata(dtype=np.float64)) -def around(input_list): +def around(input_list, ref_img): """ round: IMG Round all decimal values to the closest integer. """ _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - return np.round(input_list[0]) + return np.round(input_list[0].get_fdata(dtype=np.float64)) -def ceil(input_list): +def ceil(input_list, ref_img): """ ceil: IMG Ceil all decimal values to the next integer. """ _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - return np.ceil(input_list[0]) + return np.ceil(input_list[0].get_fdata(dtype=np.float64)) -def floor(input_list): +def floor(input_list, ref_img): """ floor: IMG Floor all decimal values to the previous integer. 
""" _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - return np.floor(input_list[0]) + return np.floor(input_list[0].get_fdata(dtype=np.float64)) -def normalize_sum(input_list): +def normalize_sum(input_list, ref_img): """ normalize_sum: IMG Normalize the image so the sum of all values is one. """ _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - return copy(input_list[0]) / np.sum(input_list[0]) + data = input_list[0].get_fdata(dtype=np.float64) + return copy(data) / np.sum(data) -def normalize_max(input_list): +def normalize_max(input_list, ref_img): """ normalize_max: IMG Normalize the image so the maximum value is one. """ _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - return copy(input_list[0]) / np.max(input_list[0]) + data = input_list[0].get_fdata(dtype=np.float64) + return copy(data) / np.max(data) -def base_10_log(input_list): +def base_10_log(input_list, ref_img): """ log_10: IMG Apply a log (base 10) to all non zeros values of an image. """ _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - output_data = np.zeros(input_list[0].shape) - output_data[input_list[0] > EPSILON] = np.log10( - input_list[0][input_list[0] > EPSILON]) + data = input_list[0].get_fdata(dtype=np.float64) + output_data = np.zeros(data.shape, dtype=np.float64) + output_data[data > EPSILON] = np.log10(data[data > EPSILON]) output_data[np.abs(output_data) < EPSILON] = -65536 return output_data -def natural_log(input_list): +def natural_log(input_list, ref_img): """ log_e: IMG Apply a natural log to all non zeros values of an image. """ _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - output_data = np.zeros(input_list[0].shape) - output_data[input_list[0] > EPSILON] = np.log( - input_list[0][input_list[0] > EPSILON]) + data = input_list[0].get_fdata(dtype=np.float64) + output_data = np.zeros(data.shape, dtype=np.float64) + output_data[data > EPSILON] = np.log(data[data > EPSILON]) output_data[np.abs(output_data) < EPSILON] = -65536 return output_data -def convert(input_list): +def convert(input_list, ref_img): """ convert: IMG Perform no operation, but simply change the data type. """ _validate_length(input_list, 1) - _validate_dtype(input_list[0], np.ndarray) + _validate_type(input_list[0], nib.Nifti1Image) - return copy(input_list[0]) + return copy(input_list[0].get_fdata(dtype=np.float64)) -def addition(input_list): +def addition(input_list, ref_img): """ addition: IMGs Add multiple images together. """ _validate_length(input_list, 2, at_least=True) - _validate_arrays(*input_list) - ref_array = input_list[0] + _validate_imgs(*input_list, ref_img) - output_data = np.zeros(ref_array.shape) - for data in input_list: - output_data += data + output_data = np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) + for img in input_list: + if isinstance(img, nib.Nifti1Image): + data = img.get_fdata(dtype=np.float64) + output_data += data + img.uncache() + else: + output_data += img return output_data -def subtraction(input_list): +def subtraction(input_list, ref_img): """ subtraction: IMG_1 IMG_2 Subtract first image by the second (IMG_1 - IMG_2). 
""" _validate_length(input_list, 2) - _validate_arrays(*input_list) + _validate_imgs(*input_list, ref_img) - return input_list[0] - input_list[1] + output_data = np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) + if isinstance(input_list[0], nib.Nifti1Image): + data_1 = input_list[0].get_fdata(dtype=np.float64) + else: + data_1 = input_list[0] + if isinstance(input_list[1], nib.Nifti1Image): + data_2 = input_list[1].get_fdata(dtype=np.float64) + else: + data_2 = input_list[1] + output_data += data_1 + return output_data - data_2 -def multiplication(input_list): + +def multiplication(input_list, ref_img): """ multiplication: IMGs Multiply multiple images together (danger of underflow and overflow) """ _validate_length(input_list, 2, at_least=True) - _validate_arrays(*input_list) + _validate_imgs(*input_list, ref_img) - output_data = input_list[0] - for data in input_list[1:]: - output_data *= data + output_data = np.ones(ref_img.header.get_data_shape()) + if isinstance(input_list[0], nib.Nifti1Image): + output_data *= input_list[0].get_fdata(dtype=np.float64) + else: + output_data *= input_list[0] + for img in input_list[1:]: + if isinstance(img, nib.Nifti1Image): + data = img.get_fdata(dtype=np.float64) + output_data *= data + img.uncache() + else: + output_data *= img return output_data -def division(input_list): +def division(input_list, ref_img): """ division: IMG_1 IMG_2 Divide first image by the second (danger of underflow and overflow) Ignore zeros values, excluded from the operation. """ _validate_length(input_list, 2) - _validate_arrays(*input_list) - ref_array = input_list[0] + _validate_imgs(*input_list, ref_img) + + output_data = np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) + if isinstance(input_list[0], nib.Nifti1Image): + data_1 = input_list[0].get_fdata(dtype=np.float64) + else: + data_1 = input_list[0] + if isinstance(input_list[1], nib.Nifti1Image): + data_2 = input_list[1].get_fdata(dtype=np.float64) + else: + data_2 = input_list[1] - output_data = np.zeros(ref_array.shape) - output_data[input_list[1] != 0] = input_list[0][input_list[1] != 0] \ - / input_list[1][input_list[1] > 0] + output_data += data_1 + output_data[data_2 != 0] /= data_2[data_2 != 0] return output_data -def mean(input_list): +def mean(input_list, ref_img): """ mean: IMGs Compute the mean of images. If a single 4D image is provided, average along the last dimension. """ _validate_length(input_list, 1, at_least=True) - _validate_arrays(*input_list) - ref_array = input_list[0] + _validate_imgs(*input_list, ref_img) - if len(input_list) == 1 and not ref_array.ndim > 3: + if len(input_list) == 1 and not len(input_list[0].header.get_data_shape()) > 3: logging.error('This operation with only one operand requires 4D data.') raise ValueError - in_data = np.squeeze(np.rollaxis(np.array(input_list), 0, - input_list[0].ndim+1)) - - return np.average(in_data, axis=-1) + if len(input_list[0].header.get_data_shape()) > 3: + return np.average(input_list[0].get_fdata(dtype=np.float64), axis=-1) + else: + return addition(input_list, ref_img) / len(input_list) -def std(input_list): +def std(input_list, ref_img): """ std: IMGs Compute the standard deviation average of multiple images. @@ -366,142 +405,170 @@ def std(input_list): dimension. 
""" _validate_length(input_list, 1, at_least=True) - _validate_arrays(*input_list) - ref_array = input_list[0] + _validate_imgs(*input_list, ref_img) - if len(input_list) == 1 and not ref_array.ndim > 3: + if len(input_list) == 1 and not len(input_list[0].header.get_data_shape()) > 3: logging.error('This operation with only one operand requires 4D data.') raise ValueError - in_data = np.squeeze(np.rollaxis(np.array(input_list), 0, - input_list[0].ndim+1)) - - return np.std(in_data, axis=-1) + if len(input_list[0].header.get_data_shape()) > 3: + return np.average(input_list[0].get_fdata(dtype=np.float64), axis=-1) + else: + mean_data = mean(input_list, ref_img) + output_data = np.zeros(input_list[0].header.get_data_shape()) + for img in input_list: + if isinstance(img, nib.Nifti1Image): + data = img.get_fdata(dtype=np.float64) + output_data += (data - mean_data) ** 2 + img.uncache() + else: + output_data += (img - mean_data) ** 2 + return np.sqrt(output_data / (len(input_list) - 1)) -def union(input_list): +def union(input_list, ref_img): """ union: IMGs Operation on binary image to keep voxels, that are non-zero, in at least one file. """ - output_data = addition(input_list) + output_data = addition(input_list, ref_img) output_data[output_data != 0] = 1 return output_data -def intersection(input_list): +def intersection(input_list, ref_img): """ intersection: IMGs Operation on binary image to keep the voxels, that are non-zero, are present in all files. """ - output_data = multiplication(input_list) + output_data = multiplication(input_list, ref_img) output_data[output_data != 0] = 1 return output_data -def difference(input_list): +def difference(input_list, ref_img): """ difference: IMG_1 IMG_2 Operation on binary image to keep voxels from the first file that are not in the second file (non-zeros). """ _validate_length(input_list, 2) - _validate_arrays(*input_list) + _validate_imgs(*input_list, ref_img) - output_data = copy(input_list[0]).astype(np.bool) - output_data[input_list[1] != 0] = 0 + output_data = np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) + if isinstance(input_list[0], nib.Nifti1Image): + data_1 = input_list[0].get_fdata(dtype=np.float64) + else: + data_1 = input_list[0] + if isinstance(input_list[1], nib.Nifti1Image): + data_2 = input_list[1].get_fdata(dtype=np.float64) + else: + data_2 = input_list[1] + output_data[data_1 != 0] = 1 + output_data[data_2 != 0] = 0 return output_data -def invert(input_list): +def invert(input_list, ref_img): """ invert: IMG Operation on binary image to interchange 0s and 1s in a binary mask. """ _validate_length(input_list, 1) - _validate_arrays(*input_list) + _validate_type(input_list[0], nib.Nifti1Image) - output_data = np.zeros(input_list[0].shape) - output_data[input_list[0] != 0] = 0 - output_data[input_list[0] == 0] = 1 + data = input_list[0].get_fdata(dtype=np.float64) + output_data = np.zeros(data.shape, dtype=np.float64) + output_data[data != 0] = 0 + output_data[data == 0] = 1 return output_data -def concat(input_list): +def concat(input_list, ref_img): """ concat: IMGs Concatenate a list of 3D images into a single 4D image. 
""" - _validate_arrays(*input_list) - if input_list[0].ndim != 3: + _validate_imgs(*input_list, ref_img) + if len(input_list[0].header.get_data_shape()) != 3: raise ValueError('Concatenate require 3D arrays.') - return np.rollaxis(np.stack(input_list), axis=0, start=4) + input_data = [] + for img in input_list: + data = img.get_fdata(dtype=np.float64) + input_data.append(data) + img.uncache() + return np.rollaxis(np.stack(input_data), axis=0, start=4) -def dilation(input_list): +def dilation(input_list, ref_img): """ dilation: IMG, VALUE Binary morphological operation to spatially extend the values of an image to their neighbors. """ _validate_length(input_list, 2) - _validate_arrays(input_list[0]) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - return binary_dilation(input_list[0], iterations=int(input_list[1])) + return binary_dilation(input_list[0].get_fdata(dtype=np.float64), + iterations=int(input_list[1])) -def erosion(input_list): +def erosion(input_list, ref_img): """ erosion: IMG, VALUE Binary morphological operation to spatially shrink the volume contained in a binary image. """ _validate_length(input_list, 2) - _validate_arrays(input_list[0]) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - return binary_erosion(input_list[0], iterations=int(input_list[1])) + return binary_erosion(input_list[0].get_fdata(dtype=np.float64), + iterations=int(input_list[1])) -def closing(input_list): +def closing(input_list, ref_img): """ closing: IMG, VALUE Binary morphological operation, dilation followed by an erosion. """ _validate_length(input_list, 2) - _validate_arrays(input_list[0]) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - return binary_closing(input_list[0], iterations=int(input_list[1])) + return binary_closing(input_list[0].get_fdata(dtype=np.float64), + iterations=int(input_list[1])) -def opening(input_list): +def opening(input_list, ref_img): """ opening: IMG, VALUE Binary morphological operation, erosion followed by a dilation. """ _validate_length(input_list, 2) - _validate_arrays(input_list[0]) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - return binary_opening(input_list[0], iterations=int(input_list[1])) + return binary_opening(input_list[0].get_fdata(dtype=np.float64), + iterations=int(input_list[1])) -def gaussian_blur(input_list): +def gaussian_blur(input_list, ref_img): """ blur: IMG, VALUE Apply a gaussian blur to a single image. 
""" _validate_length(input_list, 2) - _validate_arrays(input_list[0]) + _validate_type(input_list[0], nib.Nifti1Image) _validate_float(input_list[1]) - return gaussian_filter(input_list[0], sigma=input_list[1]) + return gaussian_filter(input_list[0].get_fdata(dtype=np.float64), + sigma=input_list[1]) diff --git a/scripts/scil_image_math.py b/scripts/scil_image_math.py index 73287b93d..b2ef86e39 100755 --- a/scripts/scil_image_math.py +++ b/scripts/scil_image_math.py @@ -58,24 +58,27 @@ def _build_arg_parser(): return p -def load_data(arg): +def load_data(arg, ref): if is_float(arg): - data = float(arg) + img = float(arg) + dtype = np.float64 else: if not os.path.isfile(arg): raise ValueError('Input file {} does not exist.'.format(arg)) - data = np.asanyarray(nib.load(arg).dataobj) + img = nib.load(arg) + shape = img.header.get_data_shape() + dtype = img.header.get_data_dtype() logging.info('Loaded {} of shape {} and data_type {}.'.format( - arg, data.shape, data.dtype)) + arg, shape, dtype)) - if data.ndim > 3: + if len(shape) > 3: logging.warning('{} has {} dimensions, be careful.'.format( - arg, data.ndim)) - elif data.ndim < 3: + arg, len(shape))) + elif len(shape) < 3: raise ValueError('{} has {} dimensions, not valid.'.format( - arg, data.ndim)) + arg, len(shape))) - return data + return img, dtype def main(): @@ -96,25 +99,31 @@ def main(): # Find at least one image for reference for input_arg in args.in_images: + found_ref = False if not is_float(input_arg): ref_img = nib.load(input_arg) mask = np.zeros(ref_img.shape) + found_ref = True break + if not found_ref: + raise ValueError('Requires at least one nifti image.') + # Load all input masks. - input_data = [] + input_img = [] for input_arg in args.in_images: if not is_float(input_arg) and \ not is_header_compatible(ref_img, input_arg): parser.error('Inputs do not have a compatible header.') - data = load_data(input_arg) + img, dtype = load_data(input_arg, ref_img) - if isinstance(data, np.ndarray) and \ - data.dtype != ref_img.get_data_dtype() and \ + if isinstance(img, nib.Nifti1Image) and \ + dtype != ref_img.get_data_dtype() and \ not args.data_type: parser.error('Inputs do not have a compatible data type.\n' 'Use --data_type to specify output datatype.') - if args.operation in binary_op and isinstance(data, np.ndarray): + if args.operation in binary_op and isinstance(img, nib.Nifti1Image): + data = img.get_fdata(dtype=np.float64) unique = np.unique(data) if not len(unique) <= 2: parser.error('Binary operations can only be performed with ' @@ -124,18 +133,18 @@ def main(): logging.warning('Input data for binary operation are not ' 'binary arrays, will be converted.\n' 'Non-zeros will be set to ones.') - data[data != 0] = 1 - if isinstance(data, np.ndarray): - data = data.astype(np.float64) + if isinstance(img, nib.Nifti1Image): + data = img.get_fdata(dtype=np.float64) mask[data > 0] = 1 - input_data.append(data) + img.uncache() + input_img.append(img) if args.operation == 'convert' and not args.data_type: parser.error('Convert operation must be used with --data_type.') try: - output_data = OPERATIONS[args.operation](input_data) + output_data = OPERATIONS[args.operation](input_img, ref_img) except ValueError as msg: logging.error('{} operation failed.'.format( args.operation.capitalize())) @@ -150,7 +159,7 @@ def main(): if args.exclude_background: output_data[mask == 0] = 0 - + new_img = nib.Nifti1Image(output_data, ref_img.affine, header=ref_img.header) nib.save(new_img, args.out_image) diff --git 
a/scripts/scil_split_tractograms.py b/scripts/scil_split_tractograms.py
index 293a78e9d..cfb221167 100755
--- a/scripts/scil_split_tractograms.py
+++ b/scripts/scil_split_tractograms.py
@@ -15,8 +15,9 @@
 import numpy as np
 
 from scilpy.io.streamlines import load_tractogram_with_reference
-from scilpy.io.utils import (add_overwrite_arg, assert_inputs_exist,
-                             assert_outputs_exist, add_reference_arg)
+from scilpy.io.utils import (add_overwrite_arg, add_reference_arg,
+                             assert_inputs_exist, assert_outputs_exist,
+                             assert_output_dirs_exist_and_empty)
 
 
 def _build_arg_parser():
@@ -26,14 +27,16 @@
     p.add_argument('in_tractogram',
                    help='Tractogram input file name.')
 
-    p.add_argument('out_tractogram',
-                   help='Output filename, with extension needed,'
-                        'index will be appended automatically.')
+    p.add_argument('out_prefix',
+                   help='Prefix for the output tractograms, '
+                        'index will be appended automatically.')
+
+    p.add_argument('--out_dir', default='',
+                   help='Put all output tractograms in a specific directory.')
 
     group = p.add_mutually_exclusive_group(required=True)
     group.add_argument('--chunk_size', type=int,
                        help='The maximum number of streamlines per file.')
-
     group.add_argument('--nb_chunk', type=int,
                        help='Divide the file in equal parts.')
@@ -48,13 +51,15 @@ def main():
     args = parser.parse_args()
 
     assert_inputs_exist(parser, args.in_tractogram)
-    out_basename, out_extension = os.path.splitext(args.out_tractogram)
+    _, out_extension = os.path.splitext(args.in_tractogram)
+    assert_output_dirs_exist_and_empty(parser, args, [], optional=args.out_dir)
 
     # Check only the first potential output filename
-    assert_outputs_exist(parser, args, [out_basename + '_0' + out_extension])
+    assert_outputs_exist(parser, args, os.path.join(args.out_dir,
+                                                    '{}_0{}'.format(args.out_prefix,
+                                                                    out_extension)))
 
     sft = load_tractogram_with_reference(parser, args, args.in_tractogram)
-
     streamlines_count = len(sft.streamlines)
 
     if args.nb_chunk:
@@ -79,7 +84,9 @@ def main():
                                data_per_point=data_per_point,
                                data_per_streamline=data_per_streamline)
 
-        out_name = '{0}_{1}{2}'.format(out_basename, i, out_extension)
+        out_name = os.path.join(args.out_dir,
+                                '{0}_{1}{2}'.format(args.out_prefix, i,
+                                                    out_extension))
         save_tractogram(new_sft, out_name)
 
diff --git a/scripts/scil_split_volume_by_ids.py b/scripts/scil_split_volume_by_ids.py
index 9bf2552ef..be6cb18cd 100755
--- a/scripts/scil_split_volume_by_ids.py
+++ b/scripts/scil_split_volume_by_ids.py
@@ -16,8 +16,9 @@
 import numpy as np
 
 from scilpy.io.image import get_data_as_label
-from scilpy.io.utils import (add_overwrite_arg, assert_inputs_exist,
-                             assert_outputs_exist)
+from scilpy.io.utils import (add_overwrite_arg,
+                             assert_inputs_exist, assert_outputs_exist,
+                             assert_output_dirs_exist_and_empty)
 
 
 # Taken from http://stackoverflow.com/a/6512463
@@ -69,7 +70,6 @@ def _build_arg_parser():
 
 
 def main():
-
     parser = _build_arg_parser()
     args = parser.parse_args()
 
@@ -96,13 +96,11 @@ def main():
         else:
             output_filenames.append(os.path.join(args.out_dir,
                                                  '{0}.nii.gz'.format(
-                                                     name)))
+                                                    name)))
 
+    assert_output_dirs_exist_and_empty(parser, args, [], optional=args.out_dir)
     assert_outputs_exist(parser, args, output_filenames)
 
-    if args.out_dir and not os.path.isdir(args.out_dir):
-        os.mkdir(args.out_dir)
-
    # Extract the voxels that match the label and save them to a file.
cnt_filename = 0 for label in label_indices: diff --git a/scripts/scil_split_volume_by_labels.py b/scripts/scil_split_volume_by_labels.py index 3e67b5d10..0deca0127 100755 --- a/scripts/scil_split_volume_by_labels.py +++ b/scripts/scil_split_volume_by_labels.py @@ -20,8 +20,9 @@ import scilpy from scilpy.io.image import get_data_as_label -from scilpy.io.utils import (add_overwrite_arg, assert_inputs_exist, - assert_outputs_exist) +from scilpy.io.utils import (add_overwrite_arg, + assert_inputs_exist, assert_outputs_exist, + assert_output_dirs_exist_and_empty) def _build_arg_parser(): @@ -40,14 +41,12 @@ def _build_arg_parser(): help='Prefix to be used for each output image.') mutual_group = p.add_mutually_exclusive_group(required=True) - mutual_group.add_argument( - '--scilpy_lut', choices=luts, - help='Lookup table, in the file scilpy/data/LUT, ' - 'used to name the output files.') - mutual_group.add_argument( - '--custom_lut', - help='Path of the lookup table file, ' - 'used to name the output files.') + mutual_group.add_argument('--scilpy_lut', choices=luts, + help='Lookup table, in the file scilpy/data/LUT, ' + 'used to name the output files.') + mutual_group.add_argument('--custom_lut', + help='Path of the lookup table file, ' + 'used to name the output files.') add_overwrite_arg(p) @@ -102,13 +101,11 @@ def main(): else: output_filenames.append(os.path.join(args.out_dir, '{0}.nii.gz'.format( - name))) + name))) + assert_output_dirs_exist_and_empty(parser, args, [], optional=args.out_dir) assert_outputs_exist(parser, args, output_filenames) - if args.out_dir and not os.path.isdir(args.out_dir): - os.mkdir(args.out_dir) - # Extract the voxels that match the label and save them to a file. cnt_filename = 0 for label in label_indices: diff --git a/scripts/tests/test_image_math.py b/scripts/tests/test_image_math.py index 272f2df91..b34d82085 100644 --- a/scripts/tests/test_image_math.py +++ b/scripts/tests/test_image_math.py @@ -53,7 +53,7 @@ def test_execution_concat(script_runner): in_img_4 = os.path.join(get_home(), 'atlas', 'ids', '13.nii.gz') in_img_5 = os.path.join(get_home(), 'atlas', 'ids', '17.nii.gz') in_img_6 = os.path.join(get_home(), 'atlas', 'ids', '18.nii.gz') - ret = script_runner.run('scil_image_math.py', 'concat', + ret = script_runner.run('scil_image_math.py', 'concatenate', in_img_1, in_img_2, in_img_3, in_img_4, in_img_5, in_img_6, 'concat_ids.nii.gz') assert ret.success From 04b63c4eaa1b409a69ef556808cfb5ff71b6a62a Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 19 Aug 2020 14:58:49 -0400 Subject: [PATCH 21/40] Fix std/mean --- scilpy/image/operations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scilpy/image/operations.py b/scilpy/image/operations.py index 30c89e270..86c6a103a 100644 --- a/scilpy/image/operations.py +++ b/scilpy/image/operations.py @@ -412,7 +412,7 @@ def std(input_list, ref_img): raise ValueError if len(input_list[0].header.get_data_shape()) > 3: - return np.average(input_list[0].get_fdata(dtype=np.float64), axis=-1) + return np.std(input_list[0].get_fdata(dtype=np.float64), axis=-1) else: mean_data = mean(input_list, ref_img) output_data = np.zeros(input_list[0].header.get_data_shape()) From 058e2c78cb2838b32ed66f038228ca193f7e26f8 Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 19 Aug 2020 15:05:07 -0400 Subject: [PATCH 22/40] Fix std so its equal to numpy --- scilpy/image/operations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scilpy/image/operations.py b/scilpy/image/operations.py index 
86c6a103a..7aeefe0ac 100644 --- a/scilpy/image/operations.py +++ b/scilpy/image/operations.py @@ -423,7 +423,7 @@ def std(input_list, ref_img): img.uncache() else: output_data += (img - mean_data) ** 2 - return np.sqrt(output_data / (len(input_list) - 1)) + return np.sqrt(output_data / len(input_list)) def union(input_list, ref_img): From 48ade05305d762c5d42cd7907d0ee370f9f0c835 Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 19 Aug 2020 15:36:47 -0400 Subject: [PATCH 23/40] Adapt connectivity math with nibabel --- scilpy/image/operations.py | 6 ++-- scripts/scil_connectivity_math.py | 53 ++++++++++++++++++++++--------- scripts/scil_image_math.py | 9 +++--- 3 files changed, 47 insertions(+), 21 deletions(-) diff --git a/scilpy/image/operations.py b/scilpy/image/operations.py index 7aeefe0ac..d02e7f68e 100644 --- a/scilpy/image/operations.py +++ b/scilpy/image/operations.py @@ -387,7 +387,8 @@ def mean(input_list, ref_img): _validate_length(input_list, 1, at_least=True) _validate_imgs(*input_list, ref_img) - if len(input_list) == 1 and not len(input_list[0].header.get_data_shape()) > 3: + if len(input_list) == 1 \ + and not len(input_list[0].header.get_data_shape()) > 3: logging.error('This operation with only one operand requires 4D data.') raise ValueError @@ -407,7 +408,8 @@ def std(input_list, ref_img): _validate_length(input_list, 1, at_least=True) _validate_imgs(*input_list, ref_img) - if len(input_list) == 1 and not len(input_list[0].header.get_data_shape()) > 3: + if len(input_list) == 1 \ + and not len(input_list[0].header.get_data_shape()) > 3: logging.error('This operation with only one operand requires 4D data.') raise ValueError diff --git a/scripts/scil_connectivity_math.py b/scripts/scil_connectivity_math.py index 6930bc2b0..0629116df 100755 --- a/scripts/scil_connectivity_math.py +++ b/scripts/scil_connectivity_math.py @@ -14,6 +14,7 @@ import logging import os +import nibabel as nib import numpy as np from scilpy.image.operations import (get_array_ops, get_operations_doc) @@ -41,7 +42,7 @@ def _build_arg_parser(): choices=OPERATIONS.keys(), help='The type of operation to be performed on the ' 'matrices.') - p.add_argument('inputs', nargs='+', + p.add_argument('in_matrices', nargs='+', help='The list of matrices files or parameters.') p.add_argument('out_matrix', help='Output matrix path.') @@ -49,6 +50,9 @@ def _build_arg_parser(): p.add_argument('--data_type', help='Data type of the output image. Use the format: ' 'uint8, float16, int32.') + p.add_argument('--exclude_background', action='store_true', + help='Does not affect the background of the original ' + 'matrices.') add_overwrite_arg(p) add_verbose_arg(p) @@ -56,14 +60,15 @@ def _build_arg_parser(): return p -def load_data(arg): +def load_matrix(arg): if is_float(arg): - data = float(arg) + matrix = float(arg) else: if not os.path.isfile(arg): raise ValueError('Input file {} does not exist.'.format(arg)) - data = load_matrix_in_any_format(arg) + data = load_matrix_in_any_format(arg).astype(np.float64) + matrix = nib.Nifti1Image(data, np.eye(4)) logging.info('Loaded {} of shape {} and data_type {}.'.format( arg, data.shape, data.dtype)) @@ -74,7 +79,7 @@ def load_data(arg): raise ValueError('{} has {} dimensions, not valid.'.format( arg, data.ndim)) - return data + return matrix def main(): @@ -92,12 +97,26 @@ def main(): if args.operation not in OPERATIONS.keys(): parser.error('Operation {} not implemented.'.format(args.operation)) - # Load all input masks. 
- input_data = [] - for input_arg in args.inputs: - data = load_data(input_arg) - - if args.operation in binary_op and isinstance(data, np.ndarray): + # Find at least one matrix for reference + for input_arg in args.in_matrices: + found_ref = False + if not is_float(input_arg): + ref_data = load_matrix_in_any_format(input_arg) + ref_matrix = nib.Nifti1Image(ref_data, np.eye(4)) + mask = np.zeros(ref_data.shape) + found_ref = True + break + + if not found_ref: + raise ValueError('Requires at least one matrix.') + + # Load all input matrices + input_matrices = [] + for input_arg in args.in_matrices: + matrix = load_matrix(input_arg) + + if args.operation in binary_op and isinstance(matrix, nib.Nifti1Image): + data = matrix.get_fdata(dtype=np.float64) unique = np.unique(data) if not len(unique) <= 2: parser.error('Binary operations can only be performed with ' @@ -109,16 +128,17 @@ def main(): 'Non-zeros will be set to ones.') data[data != 0] = 1 - if isinstance(data, np.ndarray): - data = data.astype(np.float64) - input_data.append(data) + if isinstance(matrix, nib.Nifti1Image): + data = matrix.get_fdata(dtype=np.float64) + mask[data > 0] = 1 + input_matrices.append(matrix) if args.operation == 'convert' and not args.data_type: parser.error('Convert operation must be used with --data_type.') # Perform the request operation try: - output_data = OPERATIONS[args.operation](input_data) + output_data = OPERATIONS[args.operation](input_matrices, ref_matrix) except ValueError: logging.error('{} operation failed.'.format( args.operation.capitalize())) @@ -130,6 +150,9 @@ def main(): else: output_data = output_data.astype(np.float64) + if args.exclude_background: + output_data[mask == 0] = 0 + # Saving in the right format save_matrix_in_any_format(args.out_matrix, output_data) diff --git a/scripts/scil_image_math.py b/scripts/scil_image_math.py index b2ef86e39..58bc2d2cd 100755 --- a/scripts/scil_image_math.py +++ b/scripts/scil_image_math.py @@ -50,7 +50,8 @@ def _build_arg_parser(): help='Data type of the output image. 
Use the format: ' 'uint8, int16, int/float32, int/float64.') p.add_argument('--exclude_background', action='store_true', - help='Does not affect the background of the original image.') + help='Does not affect the background of the original ' + 'images.') add_overwrite_arg(p) add_verbose_arg(p) @@ -58,7 +59,7 @@ def _build_arg_parser(): return p -def load_data(arg, ref): +def load_img(arg, ref): if is_float(arg): img = float(arg) dtype = np.float64 @@ -115,7 +116,7 @@ def main(): if not is_float(input_arg) and \ not is_header_compatible(ref_img, input_arg): parser.error('Inputs do not have a compatible header.') - img, dtype = load_data(input_arg, ref_img) + img, dtype = load_img(input_arg, ref_img) if isinstance(img, nib.Nifti1Image) and \ dtype != ref_img.get_data_dtype() and \ @@ -159,7 +160,7 @@ def main(): if args.exclude_background: output_data[mask == 0] = 0 - + new_img = nib.Nifti1Image(output_data, ref_img.affine, header=ref_img.header) nib.save(new_img, args.out_image) From 75803d1a0e36d4794c34e0174c7ac7b59266b1e7 Mon Sep 17 00:00:00 2001 From: frheault Date: Wed, 19 Aug 2020 21:32:50 -0400 Subject: [PATCH 24/40] Remove copy --- scilpy/image/operations.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/scilpy/image/operations.py b/scilpy/image/operations.py index d02e7f68e..b8d56c643 100644 --- a/scilpy/image/operations.py +++ b/scilpy/image/operations.py @@ -7,7 +7,6 @@ """ from collections import OrderedDict -from copy import copy import logging import nibabel as nib @@ -230,7 +229,7 @@ def normalize_sum(input_list, ref_img): _validate_type(input_list[0], nib.Nifti1Image) data = input_list[0].get_fdata(dtype=np.float64) - return copy(data) / np.sum(data) + return data / np.sum(data) def normalize_max(input_list, ref_img): @@ -242,7 +241,7 @@ def normalize_max(input_list, ref_img): _validate_type(input_list[0], nib.Nifti1Image) data = input_list[0].get_fdata(dtype=np.float64) - return copy(data) / np.max(data) + return data / np.max(data) def base_10_log(input_list, ref_img): @@ -285,7 +284,7 @@ def convert(input_list, ref_img): _validate_length(input_list, 1) _validate_type(input_list[0], nib.Nifti1Image) - return copy(input_list[0].get_fdata(dtype=np.float64)) + return input_list[0].get_fdata(dtype=np.float64) def addition(input_list, ref_img): From 3a3cac1de66f5d3c545fdd0ab4bf764afbbd4c21 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 20 Aug 2020 07:28:41 -0400 Subject: [PATCH 25/40] add a helper help --- scilpy/io/utils.py | 3 ++- scripts/scil_prepare_eddy_command.py | 4 +++- scripts/scil_prepare_topup_command.py | 4 +++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/scilpy/io/utils.py b/scilpy/io/utils.py index 41b783262..a0ea83b4c 100644 --- a/scilpy/io/utils.py +++ b/scilpy/io/utils.py @@ -2,6 +2,7 @@ import os import multiprocessing +import re import shutil import xml.etree.ElementTree as ET @@ -465,7 +466,7 @@ def assert_fsl_options_exist(parser, options_args, command): parser.error('{} command is not supported as fsl ' 'command.'.format(command)) - options = options_args.split() + options = re.split(r'[ =\s]\s*', options_args) res = [i for i in options if "--" in i] res = list(map(lambda x: x.replace('--', ''), res)) diff --git a/scripts/scil_prepare_eddy_command.py b/scripts/scil_prepare_eddy_command.py index 0f9d1453b..34ced61b9 100755 --- a/scripts/scil_prepare_eddy_command.py +++ b/scripts/scil_prepare_eddy_command.py @@ -79,7 +79,9 @@ def _build_arg_parser(): 'Enhances reproducibility.') 
p.add_argument('--eddy_options', default=None, - help='Additional options you want to use to run eddy.') + help='Additional options you want to use to run eddy.' + 'Add these options using quotes (i.e. "--ol_nstd=6' + ' --mb=4").') add_overwrite_arg(p) add_verbose_arg(p) diff --git a/scripts/scil_prepare_topup_command.py b/scripts/scil_prepare_topup_command.py index 056d08797..7d851aa1e 100755 --- a/scripts/scil_prepare_topup_command.py +++ b/scripts/scil_prepare_topup_command.py @@ -68,7 +68,9 @@ def _build_arg_parser(): 'terminal [%(default)s].') p.add_argument('--topup_options', default=None, - help='Additional options you want to use to run topup.') + help='Additional options you want to use to run topup.' + 'Add these options using quotes (i.e. "--fwhm=6' + ' --miter=4").') add_overwrite_arg(p) add_verbose_arg(p) From 95e979e2e01df6359c20e1271ea884ad6d2493be Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 20 Aug 2020 12:28:16 -0400 Subject: [PATCH 26/40] answer francois comments --- scripts/scil_prepare_eddy_command.py | 2 +- scripts/scil_prepare_topup_command.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/scil_prepare_eddy_command.py b/scripts/scil_prepare_eddy_command.py index 34ced61b9..4cd6000f3 100755 --- a/scripts/scil_prepare_eddy_command.py +++ b/scripts/scil_prepare_eddy_command.py @@ -79,7 +79,7 @@ def _build_arg_parser(): 'Enhances reproducibility.') p.add_argument('--eddy_options', default=None, - help='Additional options you want to use to run eddy.' + help='Additional options you want to use to run eddy.\n' 'Add these options using quotes (i.e. "--ol_nstd=6' ' --mb=4").') diff --git a/scripts/scil_prepare_topup_command.py b/scripts/scil_prepare_topup_command.py index 7d851aa1e..b6c4b9d25 100755 --- a/scripts/scil_prepare_topup_command.py +++ b/scripts/scil_prepare_topup_command.py @@ -68,7 +68,7 @@ def _build_arg_parser(): 'terminal [%(default)s].') p.add_argument('--topup_options', default=None, - help='Additional options you want to use to run topup.' + help='Additional options you want to use to run topup.\n' 'Add these options using quotes (i.e. 
"--fwhm=6' ' --miter=4").') From 75e2c707048d392a094a6e943f0c43f883074621 Mon Sep 17 00:00:00 2001 From: frheault Date: Fri, 21 Aug 2020 10:34:52 -0400 Subject: [PATCH 27/40] Remove ref --- scripts/scil_image_math.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/scil_image_math.py b/scripts/scil_image_math.py index 58bc2d2cd..ed51a65b2 100755 --- a/scripts/scil_image_math.py +++ b/scripts/scil_image_math.py @@ -59,7 +59,7 @@ def _build_arg_parser(): return p -def load_img(arg, ref): +def load_img(arg): if is_float(arg): img = float(arg) dtype = np.float64 @@ -116,7 +116,7 @@ def main(): if not is_float(input_arg) and \ not is_header_compatible(ref_img, input_arg): parser.error('Inputs do not have a compatible header.') - img, dtype = load_img(input_arg, ref_img) + img, dtype = load_img(input_arg) if isinstance(img, nib.Nifti1Image) and \ dtype != ref_img.get_data_dtype() and \ From cea0531db60a0b74706abd270eb96e5f99b91c4e Mon Sep 17 00:00:00 2001 From: frheault Date: Tue, 1 Sep 2020 09:15:02 -0400 Subject: [PATCH 28/40] Replace json by txt in doc --- scripts/scil_visualize_connectivity.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/scil_visualize_connectivity.py b/scripts/scil_visualize_connectivity.py index cd2764063..598490a15 100755 --- a/scripts/scil_visualize_connectivity.py +++ b/scripts/scil_visualize_connectivity.py @@ -12,7 +12,7 @@ - Names (using --labels_list and --lookup_table) If the matrix was made from a bigger matrix using scil_reorder_connectivity.py, -provide the json and specify the key (using --reorder_txt) +provide the text file(s), using --labels_list and/or --reorder_txt. """ import argparse @@ -40,9 +40,9 @@ def _build_arg_parser(): g1 = p.add_argument_group(title='Naming options') g1.add_argument('--labels_list', help='List saved by the decomposition script,\n' - 'the json must contain labels rather than coordinates.') + 'must contain labels rather than coordinates (.txt).') g1.add_argument('--reorder_txt', - help='Txt file with two rows (x/y) listing the ordering.') + help='File with two rows (x/y) listing the ordering (.txt).') g1.add_argument('--lookup_table', help='Lookup table with the label number as keys and the ' 'name as values.') From 50d171a895c3eba359972879426294a91fe03c6a Mon Sep 17 00:00:00 2001 From: frheault Date: Thu, 3 Sep 2020 12:26:58 -0400 Subject: [PATCH 29/40] Crash of more than one 4D --- scilpy/image/operations.py | 63 +++++++++++++++++++++----------------- 1 file changed, 35 insertions(+), 28 deletions(-) diff --git a/scilpy/image/operations.py b/scilpy/image/operations.py index b8d56c643..01e8eb0c2 100644 --- a/scilpy/image/operations.py +++ b/scilpy/image/operations.py @@ -386,10 +386,14 @@ def mean(input_list, ref_img): _validate_length(input_list, 1, at_least=True) _validate_imgs(*input_list, ref_img) - if len(input_list) == 1 \ - and not len(input_list[0].header.get_data_shape()) > 3: - logging.error('This operation with only one operand requires 4D data.') - raise ValueError + if len(input_list[0].header.get_data_shape()) > 3: + if not len(input_list) == 1: + raise ValueError( + 'This operation with 4D data only support one operand.') + else: + if len(input_list) == 1: + raise ValueError( + 'This operation with only one operand requires 4D data.') if len(input_list[0].header.get_data_shape()) > 3: return np.average(input_list[0].get_fdata(dtype=np.float64), axis=-1) @@ -407,19 +411,22 @@ def std(input_list, ref_img): _validate_length(input_list, 1, at_least=True) 
_validate_imgs(*input_list, ref_img) - if len(input_list) == 1 \ - and not len(input_list[0].header.get_data_shape()) > 3: - logging.error('This operation with only one operand requires 4D data.') - raise ValueError + if len(input_list[0].header.get_data_shape()) > 3: + if not len(input_list) == 1: + raise ValueError( + 'This operation with 4D data only support one operand.') + else: + if len(input_list) == 1: + raise ValueError('This operation with only one operand requires 4D data.') if len(input_list[0].header.get_data_shape()) > 3: return np.std(input_list[0].get_fdata(dtype=np.float64), axis=-1) else: - mean_data = mean(input_list, ref_img) - output_data = np.zeros(input_list[0].header.get_data_shape()) + mean_data=mean(input_list, ref_img) + output_data=np.zeros(input_list[0].header.get_data_shape()) for img in input_list: if isinstance(img, nib.Nifti1Image): - data = img.get_fdata(dtype=np.float64) + data=img.get_fdata(dtype=np.float64) output_data += (data - mean_data) ** 2 img.uncache() else: @@ -433,8 +440,8 @@ def union(input_list, ref_img): Operation on binary image to keep voxels, that are non-zero, in at least one file. """ - output_data = addition(input_list, ref_img) - output_data[output_data != 0] = 1 + output_data=addition(input_list, ref_img) + output_data[output_data != 0]=1 return output_data @@ -445,8 +452,8 @@ def intersection(input_list, ref_img): Operation on binary image to keep the voxels, that are non-zero, are present in all files. """ - output_data = multiplication(input_list, ref_img) - output_data[output_data != 0] = 1 + output_data=multiplication(input_list, ref_img) + output_data[output_data != 0]=1 return output_data @@ -460,18 +467,18 @@ def difference(input_list, ref_img): _validate_length(input_list, 2) _validate_imgs(*input_list, ref_img) - output_data = np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) + output_data=np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) if isinstance(input_list[0], nib.Nifti1Image): - data_1 = input_list[0].get_fdata(dtype=np.float64) + data_1=input_list[0].get_fdata(dtype=np.float64) else: - data_1 = input_list[0] + data_1=input_list[0] if isinstance(input_list[1], nib.Nifti1Image): - data_2 = input_list[1].get_fdata(dtype=np.float64) + data_2=input_list[1].get_fdata(dtype=np.float64) else: - data_2 = input_list[1] + data_2=input_list[1] - output_data[data_1 != 0] = 1 - output_data[data_2 != 0] = 0 + output_data[data_1 != 0]=1 + output_data[data_2 != 0]=0 return output_data @@ -483,10 +490,10 @@ def invert(input_list, ref_img): _validate_length(input_list, 1) _validate_type(input_list[0], nib.Nifti1Image) - data = input_list[0].get_fdata(dtype=np.float64) - output_data = np.zeros(data.shape, dtype=np.float64) - output_data[data != 0] = 0 - output_data[data == 0] = 1 + data=input_list[0].get_fdata(dtype=np.float64) + output_data=np.zeros(data.shape, dtype=np.float64) + output_data[data != 0]=0 + output_data[data == 0]=1 return output_data @@ -500,9 +507,9 @@ def concat(input_list, ref_img): if len(input_list[0].header.get_data_shape()) != 3: raise ValueError('Concatenate require 3D arrays.') - input_data = [] + input_data=[] for img in input_list: - data = img.get_fdata(dtype=np.float64) + data=img.get_fdata(dtype=np.float64) input_data.append(data) img.uncache() return np.rollaxis(np.stack(input_data), axis=0, start=4) From ac950d8d741436743dfdbea50def76a340173c9b Mon Sep 17 00:00:00 2001 From: frheault Date: Thu, 10 Sep 2020 13:52:05 -0400 Subject: [PATCH 30/40] Pep8 fix --- 
scilpy/image/operations.py | 43 +++++++++++++++++++------------------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/scilpy/image/operations.py b/scilpy/image/operations.py index 01e8eb0c2..4d1b61b73 100644 --- a/scilpy/image/operations.py +++ b/scilpy/image/operations.py @@ -417,16 +417,17 @@ def std(input_list, ref_img): 'This operation with 4D data only support one operand.') else: if len(input_list) == 1: - raise ValueError('This operation with only one operand requires 4D data.') + raise ValueError( + 'This operation with only one operand requires 4D data.') if len(input_list[0].header.get_data_shape()) > 3: return np.std(input_list[0].get_fdata(dtype=np.float64), axis=-1) else: - mean_data=mean(input_list, ref_img) - output_data=np.zeros(input_list[0].header.get_data_shape()) + mean_data = mean(input_list, ref_img) + output_data = np.zeros(input_list[0].header.get_data_shape()) for img in input_list: if isinstance(img, nib.Nifti1Image): - data=img.get_fdata(dtype=np.float64) + data = img.get_fdata(dtype=np.float64) output_data += (data - mean_data) ** 2 img.uncache() else: @@ -440,8 +441,8 @@ def union(input_list, ref_img): Operation on binary image to keep voxels, that are non-zero, in at least one file. """ - output_data=addition(input_list, ref_img) - output_data[output_data != 0]=1 + output_data = addition(input_list, ref_img) + output_data[output_data != 0] = 1 return output_data @@ -452,8 +453,8 @@ def intersection(input_list, ref_img): Operation on binary image to keep the voxels, that are non-zero, are present in all files. """ - output_data=multiplication(input_list, ref_img) - output_data[output_data != 0]=1 + output_data = multiplication(input_list, ref_img) + output_data[output_data != 0] = 1 return output_data @@ -467,18 +468,18 @@ def difference(input_list, ref_img): _validate_length(input_list, 2) _validate_imgs(*input_list, ref_img) - output_data=np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) + output_data = np.zeros(ref_img.header.get_data_shape(), dtype=np.float64) if isinstance(input_list[0], nib.Nifti1Image): - data_1=input_list[0].get_fdata(dtype=np.float64) + data_1 = input_list[0].get_fdata(dtype=np.float64) else: - data_1=input_list[0] + data_1 = input_list[0] if isinstance(input_list[1], nib.Nifti1Image): - data_2=input_list[1].get_fdata(dtype=np.float64) + data_2 = input_list[1].get_fdata(dtype=np.float64) else: - data_2=input_list[1] + data_2 = input_list[1] - output_data[data_1 != 0]=1 - output_data[data_2 != 0]=0 + output_data[data_1 != 0] = 1 + output_data[data_2 != 0] = 0 return output_data @@ -490,10 +491,10 @@ def invert(input_list, ref_img): _validate_length(input_list, 1) _validate_type(input_list[0], nib.Nifti1Image) - data=input_list[0].get_fdata(dtype=np.float64) - output_data=np.zeros(data.shape, dtype=np.float64) - output_data[data != 0]=0 - output_data[data == 0]=1 + data = input_list[0].get_fdata(dtype=np.float64) + output_data = np.zeros(data.shape, dtype=np.float64) + output_data[data != 0] = 0 + output_data[data == 0] = 1 return output_data @@ -507,9 +508,9 @@ def concat(input_list, ref_img): if len(input_list[0].header.get_data_shape()) != 3: raise ValueError('Concatenate require 3D arrays.') - input_data=[] + input_data = [] for img in input_list: - data=img.get_fdata(dtype=np.float64) + data = img.get_fdata(dtype=np.float64) input_data.append(data) img.uncache() return np.rollaxis(np.stack(input_data), axis=0, start=4) From 169ffa869f8f582b3b9268ca40c9345d292fbec3 Mon Sep 17 00:00:00 2001 From: frheault 
Date: Thu, 10 Sep 2020 15:42:27 -0400
Subject: [PATCH 31/40] Fix dipy version

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index faa175273..222b8245b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@ bz2file==0.98.*
 coloredlogs==10.0.*
 cycler==0.10.*
 Cython==0.29.*
-dipy>=1.1.0
+dipy==1.1.*
 fury==0.2.*
 future==0.17.*
 h5py==2.9.*

From ec03fe0f0b07ac7faaf53de6e783b64cf2def042 Mon Sep 17 00:00:00 2001
From: AntoineTheb
Date: Fri, 18 Sep 2020 16:32:53 -0400
Subject: [PATCH 32/40] ENH: Update dipy,vtk,ubuntu version

---
 .travis.yml      | 6 +++---
 requirements.txt | 4 ++--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index f169a84bc..ac4e41479 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,12 +1,12 @@
-dist: xenial
+dist: bionic
 language: python
 python:
-  - "3.5"
   - "3.6"
   - "3.7"
+  - "3.8"

 before_install:
-  - sudo wget -O- http://neuro.debian.net/lists/xenial.us-ca.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list
+  - sudo wget -O- http://neuro.debian.net/lists/bionic.us-ca.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list
   - sudo apt-key adv --recv-keys --keyserver pool.sks-keyservers.net 2649A5A9 || { sudo wget -q -O- http://neuro.debian.net/_static/neuro.debian.net.asc | sudo apt-key add -; }
   - sudo apt-get update
   - sudo apt-get update
diff --git a/requirements.txt b/requirements.txt
index 1e54ce2d4..871333f76 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@ bz2file==0.98.*
 coloredlogs==10.0.*
 cycler==0.10.*
 Cython==0.29.*
-dipy==1.1.*
+dipy>=1.1.2
 fury==0.2.*
 future==0.17.*
 h5py==2.9.*
@@ -20,7 +20,7 @@ scikit-learn==0.22.*
 scipy==1.4.*
 setuptools==46.1.*
 six==1.15.*
-vtk==8.1.*
+vtk==9.0.1
 trimeshpy==0.0.*
 coloredlogs==10.0.*
 nilearn==0.6.*

From 8939ff458874e3406bb08fa92c72305ba076a99e Mon Sep 17 00:00:00 2001
From: AntoineTheb
Date: Sat, 19 Sep 2020 11:00:15 -0400
Subject: [PATCH 33/40] ENH: Add multi-shell multi-tissue scripts for computing frfs and fodfs

---
 scilpy/reconst/frf.py             | 161 ++++++++++++++++++++
 scripts/scil_compute_msmt_fodf.py | 206 +++++++++++++++++++++++++
 scripts/scil_compute_msmt_frf.py  | 243 ++++++++++++++++++++++++++++++
 3 files changed, 610 insertions(+)
 create mode 100644 scripts/scil_compute_msmt_fodf.py
 create mode 100644 scripts/scil_compute_msmt_frf.py

diff --git a/scilpy/reconst/frf.py b/scilpy/reconst/frf.py
index a6df20384..56b0e3ae0 100644
--- a/scilpy/reconst/frf.py
+++ b/scilpy/reconst/frf.py
@@ -4,6 +4,7 @@

 from dipy.core.gradients import gradient_table
 from dipy.reconst.csdeconv import auto_response
+from dipy.reconst.mcsd import mask_for_response_msmt, response_from_mask_msmt
 from dipy.segment.mask import applymask
 import numpy as np

@@ -127,3 +128,163 @@ def compute_ssst_frf(data, bvals, bvecs, mask=None, mask_wm=None,
                              response[0][2], response[1]])

     return full_response
+
+
+def compute_msmt_frf(data, bvals, bvecs, data_dti=None, bvals_dti=None,
+                     bvecs_dti=None, mask=None, mask_wm=None, mask_gm=None,
+                     mask_csf=None, fa_thr_wm=0.7, fa_thr_gm=0.2,
+                     fa_thr_csf=0.1, md_thr_gm=0.0007, md_thr_csf=0.003,
+                     min_nvox=300, roi_radii=10, roi_center=None,
+                     tol=20, force_b0_threshold=False):
+    """Compute multi-shell, multi-tissue (MSMT) Fiber Response
+    Functions (WM, GM and CSF) from a DWI volume.
+    A DTI fit is made, and voxels for each tissue type are selected
+    using thresholds on the FA and MD. 
+ + Parameters + ---------- + data : ndarray + 4D Input diffusion volume with shape (X, Y, Z, N) + bvals : ndarray + 1D bvals array with shape (N,) + bvecs : ndarray + 2D bvecs array with shape (N, 3) + mask : ndarray, optional + 3D mask with shape (X,Y,Z) + Binary mask. Only the data inside the mask will be used for + computations and reconstruction. + mask_wm : ndarray, optional + 3D mask with shape (X,Y,Z) + Binary white matter mask. Only the data inside this mask will be used + to estimate the fiber response function of WM. + mask_gm : ndarray, optional + 3D mask with shape (X,Y,Z) + Binary grey matter mask. Only the data inside this mask will be used + to estimate the fiber response function of GM. + mask_csf : ndarray, optional + 3D mask with shape (X,Y,Z) + Binary csf mask. Only the data inside this mask will be used to + estimate the fiber response function of CSF. + fa_thr_wm : float, optional + Use this threshold to select single WM fiber voxels from the FA inside + the WM mask defined by mask_wm. Each voxel above this threshold will be + selected. Defaults to 0.7 + fa_thr_gm : float, optional + Use this threshold to select GM voxels from the FA inside the GM mask + defined by mask_gm. Each voxel below this threshold will be selected. + Defaults to 0.2 + fa_thr_csf : float, optional + Use this threshold to select CSF voxels from the FA inside the CSF mask + defined by mask_csf. Each voxel below this threshold will be selected. + Defaults to 0.1 + md_thr_gm : float, optional + Use this threshold to select GM voxels from the MD inside the GM mask + defined by mask_gm. Each voxel below this threshold will be selected. + Defaults to 0.0007 + md_thr_csf : float, optional + Use this threshold to select CSF voxels from the MD inside the CSF mask + defined by mask_csf. Each voxel below this threshold will be selected. + Defaults to 0.003 + min_nvox : int, optional + Minimal number of voxels needing to be identified as single fiber + voxels in the automatic estimation. Defaults to 300. + roi_radii : int or array-like (3,), optional + Use those radii to select a cuboid roi to estimate the FRF. The roi + will be a cuboid spanning from the middle of the volume in each + direction with the different radii. Defaults to 10. + roi_center : tuple(3), optional + Use this center to span the roi of size roi_radius (center of the + 3D volume). + tol : int + tolerance gap for b-values clustering. Defaults to 20 + force_b0_threshold : bool, optional + If set, will continue even if the minimum bvalue is suspiciously high. + + Returns + ------- + reponses : list of ndarray + Fiber Response Function of each (3) tissue type, with shape (4, N). + frf_masks : list of ndarray + Mask where the frf was calculated, for each (3) tissue type, with + shape (X, Y, Z). + + Raises + ------ + ValueError + If less than `min_nvox` voxels were found with sufficient FA to + estimate the FRF. + """ + if not is_normalized_bvecs(bvecs): + logging.warning('Your b-vectors do not seem normalized...') + bvecs = normalize_bvecs(bvecs) + + check_b0_threshold(force_b0_threshold, bvals.min()) + + gtab = gradient_table(bvals, bvecs) + + if data_dti is None and bvals_dti is None and bvecs_dti is None: + logging.warning( + "No data specific to DTI was given. 
If b-values go over 1200, " + "this might produce wrong results.") + wm_frf_mask, gm_frf_mask, csf_frf_mask \ + = mask_for_response_msmt(gtab, data, + roi_center=roi_center, + roi_radii=roi_radii, + wm_fa_thr=fa_thr_wm, + gm_fa_thr=fa_thr_gm, + csf_fa_thr=fa_thr_csf, + gm_md_thr=md_thr_gm, + csf_md_thr=md_thr_csf) + elif data_dti is not None and bvals_dti is not None and bvecs_dti is not None: + if not is_normalized_bvecs(bvecs_dti): + logging.warning('Your b-vectors do not seem normalized...') + bvecs_dti = normalize_bvecs(bvecs_dti) + + check_b0_threshold(force_b0_threshold, bvals_dti.min()) + gtab_dti = gradient_table(bvals_dti, bvecs_dti) + + wm_frf_mask, gm_frf_mask, csf_frf_mask \ + = mask_for_response_msmt(gtab_dti, data_dti, + roi_center=roi_center, + roi_radii=roi_radii, + wm_fa_thr=fa_thr_wm, + gm_fa_thr=fa_thr_gm, + csf_fa_thr=fa_thr_csf, + gm_md_thr=md_thr_gm, + csf_md_thr=md_thr_csf) + else: + msg = """Input not valid. Either give no _dti input, or give all + data_dti, bvals_dti and bvecs_dti.""" + raise ValueError(msg) + + if mask is not None: + wm_frf_mask *= mask + gm_frf_mask *= mask + csf_frf_mask *= mask + if mask_wm is not None: + wm_frf_mask *= mask_wm + if mask_gm is not None: + gm_frf_mask *= mask_gm + if mask_csf is not None: + csf_frf_mask *= mask_csf + + msg = """Could not find at least {0} voxels for the {1} mask. Look at + previous warnings or be sure that external tissue masks overlap with the + cuboid ROI.""" + + if np.sum(wm_frf_mask) < min_nvox: + raise ValueError(msg.format(min_nvox, "WM")) + if np.sum(gm_frf_mask) < min_nvox: + raise ValueError(msg.format(min_nvox, "GM")) + if np.sum(csf_frf_mask) < min_nvox: + raise ValueError(msg.format(min_nvox, "CSF")) + + frf_masks = [wm_frf_mask, gm_frf_mask, csf_frf_mask] + + response_wm, response_gm, response_csf \ + = response_from_mask_msmt(gtab, data, wm_frf_mask, gm_frf_mask, + csf_frf_mask, tol=tol) + + responses = [response_wm, response_gm, response_csf] + + return responses, frf_masks diff --git a/scripts/scil_compute_msmt_fodf.py b/scripts/scil_compute_msmt_fodf.py new file mode 100644 index 000000000..68c9b2547 --- /dev/null +++ b/scripts/scil_compute_msmt_fodf.py @@ -0,0 +1,206 @@ +#! /usr/bin/env python +# -*- coding: utf-8 -*- + +""" +Script to compute Multishell Multi-tissue Constrained Spherical Deconvolution +ODFs. + +By default, will output all possible files, using default names. +Specific names can be specified using the file flags specified in the +"File flags" section. + +If --not_all is set, only the files specified explicitly by the flags +will be output. + +Based on B. Jeurissen et al., Multi-tissue constrained spherical +deconvolution for improved analysis of multi-shell diffusion +MRI data. 
Neuroimage (2014) +""" + +import argparse +import logging + +from dipy.core.gradients import gradient_table, unique_bvals_tolerance +from dipy.data import get_sphere +from dipy.io.gradients import read_bvals_bvecs +from dipy.reconst.mcsd import MultiShellDeconvModel, multi_shell_fiber_response +import nibabel as nib +import numpy as np + +from scilpy.io.image import get_data_as_mask +from scilpy.io.utils import (add_overwrite_arg, assert_inputs_exist, + assert_outputs_exist, add_force_b0_arg, + add_sh_basis_args, add_processes_arg) +from scilpy.reconst.multi_processes import fit_from_model, convert_sh_basis +from scilpy.utils.bvec_bval_tools import (check_b0_threshold, normalize_bvecs, + is_normalized_bvecs) + + +def _build_arg_parser(): + + p = argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + + p.add_argument('input', + help='Path of the input diffusion volume.') + p.add_argument('bvals', + help='Path of the bvals file, in FSL format.') + p.add_argument('bvecs', + help='Path of the bvecs file, in FSL format.') + p.add_argument('wm_frf_file', + help='Text file of WM response function.') + p.add_argument('gm_frf_file', + help='Text file of GM response function.') + p.add_argument('csf_frf_file', + help='Text file of CSF response function.') + + p.add_argument( + '--sh_order', metavar='int', default=8, type=int, + help='SH order used for the CSD. (Default: 8)') + p.add_argument( + '--mask', metavar='', + help='Path to a binary mask. Only the data inside the ' + 'mask will be used for computations and reconstruction.') + + add_force_b0_arg(p) + add_sh_basis_args(p) + add_processes_arg(p) + add_overwrite_arg(p) + + p.add_argument( + '--not_all', action='store_true', + help='If set, only saves the files specified using the ' + 'file flags. 
(Default: False)') + + g = p.add_argument_group(title='File flags') + + g.add_argument( + '--wm_out_fODF', metavar='file', default='', + help='Output filename for the WM fODF coefficients.') + g.add_argument( + '--gm_out_fODF', metavar='file', default='', + help='Output filename for the GM fODF coefficients.') + g.add_argument( + '--csf_out_fODF', metavar='file', default='', + help='Output filename for the CSF fODF coefficients.') + g.add_argument( + '--vf', metavar='file', default='', + help='Output filename for the volume fractions map.') + + return p + + +def main(): + parser = _build_arg_parser() + args = parser.parse_args() + logging.basicConfig(level=logging.INFO) + + if not args.not_all: + args.wm_out_fODF = args.wm_out_fODF or 'wm_fODF.nii.gz' + args.gm_out_fODF = args.gm_out_fODF or 'gm_fodf.nii.gz' + args.csf_out_fODF = args.csf_out_fODF or 'csf_fodf.nii.gz' + args.vf = args.vf or 'vf.nii.gz' + + arglist = [args.wm_out_fODF, args.gm_out_fODF, args.csf_out_fODF, args.vf] + if args.not_all and not any(arglist): + parser.error('When using --not_all, you need to specify at least ' + + 'one file to output.') + + assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs, + args.wm_frf_file, args.gm_frf_file, + args.csf_frf_file]) + assert_outputs_exist(parser, args, arglist) + + # Loading data + wm_frf = np.loadtxt(args.wm_frf_file) + gm_frf = np.loadtxt(args.gm_frf_file) + csf_frf = np.loadtxt(args.csf_frf_file) + vol = nib.load(args.input) + data = vol.get_fdata(dtype=np.float32) + bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs) + + # Checking mask + if args.mask is None: + mask = None + else: + mask = get_data_as_mask(nib.load(args.mask), dtype=bool) + if mask.shape != data.shape[:-1]: + raise ValueError("Mask is not the same shape as data.") + + sh_order = args.sh_order + + # Checking data and sh_order + check_b0_threshold(args.force_b0_threshold, bvals.min()) + if data.shape[-1] < (sh_order + 1) * (sh_order + 2) / 2: + logging.warning( + 'We recommend having at least {} unique DWIs volumes, but you ' + 'currently have {} volumes. Try lowering the parameter --sh_order ' + 'in case of non convergence.'.format( + (sh_order + 1) * (sh_order + 2) / 2, data.shape[-1])) + + # Checking bvals, bvecs values and loading gtab + if not is_normalized_bvecs(bvecs): + logging.warning('Your b-vectors do not seem normalized...') + bvecs = normalize_bvecs(bvecs) + gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min()) + + # Checking response functions and computing msmt response function + if not wm_frf.shape[1] == 4: + raise ValueError('WM frf file did not contain 4 elements. ' + 'Invalid or deprecated FRF format') + if not gm_frf.shape[1] == 4: + raise ValueError('GM frf file did not contain 4 elements. ' + 'Invalid or deprecated FRF format') + if not csf_frf.shape[1] == 4: + raise ValueError('CSF frf file did not contain 4 elements. 
' + 'Invalid or deprecated FRF format') + ubvals = unique_bvals_tolerance(bvals, tol=20) + msmt_response = multi_shell_fiber_response(sh_order, ubvals, + wm_frf, gm_frf, csf_frf) + + # Loading spheres + reg_sphere = get_sphere('symmetric362') + + # Computing msmt-CSD + msmt_model = MultiShellDeconvModel(gtab, msmt_response, + reg_sphere=reg_sphere, + sh_order=sh_order) + + # Computing msmt-CSD fit + msmt_fit = fit_from_model(msmt_model, data, + mask=mask, nbr_processes=args.nbr_processes) + + # Saving results + if args.wm_out_fODF: + shm_coeff = msmt_fit.shm_coeff + if args.sh_basis == 'tournier07': + shm_coeff = convert_sh_basis(shm_coeff, reg_sphere, mask=mask, + nbr_processes=args.nbr_processes) + nib.save(nib.Nifti1Image(shm_coeff.astype(np.float32), + vol.affine), args.wm_out_fODF) + + if args.gm_out_fODF: + shm_coeff = msmt_fit.all_shm_coeff[..., 1] + if args.sh_basis == 'tournier07': + shm_coeff = shm_coeff.reshape(shm_coeff.shape + (1,)) + shm_coeff = convert_sh_basis(shm_coeff, reg_sphere, mask=mask, + nbr_processes=args.nbr_processes) + nib.save(nib.Nifti1Image(shm_coeff.astype(np.float32), + vol.affine), args.gm_out_fODF) + + if args.csf_out_fODF: + shm_coeff = msmt_fit.all_shm_coeff[..., 0] + if args.sh_basis == 'tournier07': + shm_coeff = shm_coeff.reshape(shm_coeff.shape + (1,)) + shm_coeff = convert_sh_basis(shm_coeff, reg_sphere, mask=mask, + nbr_processes=args.nbr_processes) + nib.save(nib.Nifti1Image(shm_coeff.astype(np.float32), + vol.affine), args.csf_out_fODF) + + if args.vf: + nib.save(nib.Nifti1Image(msmt_fit.volume_fractions.astype(np.float32), + vol.affine), args.vf) + + +if __name__ == "__main__": + main() diff --git a/scripts/scil_compute_msmt_frf.py b/scripts/scil_compute_msmt_frf.py new file mode 100644 index 000000000..f1ac2c140 --- /dev/null +++ b/scripts/scil_compute_msmt_frf.py @@ -0,0 +1,243 @@ +#! /usr/bin/env python +# -*- coding: utf-8 -*- + +""" +Compute response functions for multi-shell multi-tissue (MSMT) +constrained spherical deconvolution from DWI data. + +The script computes a response function for white-matter (wm), +gray-matter (gm), csf and the mean b=0. + +In the wm, we compute the response function in each voxels where +the FA is superior at threshold_fa_wm. + +In the gm (or csf), we compute the response function in each voxels where +the FA is below at threshold_fa_gm (or threshold_fa_csf) and where +the MD is below threshold_md_gm (or threshold_md_csf). + +Based on B. Jeurissen et al., Multi-tissue constrained spherical +deconvolution for improved analysis of multi-shell diffusion +MRI data. 
Neuroimage (2014) +""" + +import argparse +import logging + +from dipy.core.gradients import unique_bvals_tolerance +from dipy.io.gradients import read_bvals_bvecs +import nibabel as nib +import numpy as np + +from scilpy.io.image import get_data_as_mask +from scilpy.io.utils import (add_force_b0_arg, + add_overwrite_arg, add_verbose_arg, + assert_inputs_exist, assert_outputs_exist) +from scilpy.reconst.frf import compute_msmt_frf +from scilpy.utils.bvec_bval_tools import extract_dwi_shell + + +def buildArgsParser(): + + p = argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + + p.add_argument('input', + help='Path to the input diffusion volume.') + p.add_argument('bvals', + help='Path to the bvals file, in FSL format.') + p.add_argument('bvecs', + help='Path to the bvecs file, in FSL format.') + p.add_argument('wm_frf_file', + help='Path to the output WM frf file, in .txt format.') + p.add_argument('gm_frf_file', + help='Path to the output GM frf file, in .txt format.') + p.add_argument('csf_frf_file', + help='Path to the output CSF frf file, in .txt format.') + + p.add_argument( + '--mask', + help='Path to a binary mask. Only the data inside the mask will be ' + 'used for computations and reconstruction. Useful if no tissue ' + 'masks are available.') + p.add_argument( + '--mask_wm', + help='Path to the WM mask file.') + p.add_argument( + '--mask_gm', + help='Path to the GM mask file.') + p.add_argument( + '--mask_csf', + help='Path to the CSF mask file.') + + p.add_argument( + '--fa_thr_wm', default=0.7, type=float, + help='If supplied, use this threshold to select single WM fiber ' + 'voxels from the FA inside the WM mask defined by mask_wm. Each ' + 'voxel above this threshold will be selected. [%(default)s]') + p.add_argument( + '--fa_thr_gm', default=0.2, type=float, + help='If supplied, use this threshold to select GM voxels from the FA ' + 'inside the GM mask defined by mask_gm. Each voxel below this ' + 'threshold will be selected. [%(default)s]') + p.add_argument( + '--fa_thr_csf', default=0.1, type=float, + help='If supplied, use this threshold to select CSF voxels from the ' + 'FA inside the CSF mask defined by mask_csf. Each voxel below ' + 'this threshold will be selected. [%(default)s]') + p.add_argument( + '--md_thr_gm', default=0.0007, type=float, + help='If supplied, use this threshold to select GM voxels from the MD ' + 'inside the GM mask defined by mask_gm. Each voxel below this ' + 'threshold will be selected. [%(default)s]') + p.add_argument( + '--md_thr_csf', default=0.003, type=float, + help='If supplied, use this threshold to select CSF voxels from the ' + 'MD inside the CSF mask defined by mask_csf. Each voxel below ' + 'this threshold will be selected. [%(default)s]') + + p.add_argument( + '--min_nvox', default=100, type=int, + help='Minimal number of voxels needed for each tissue masks ' + 'in order to proceed to frf estimation. [%(default)s]') + p.add_argument( + '--tolerance', type=int, default=20, + help='The tolerated gap between the b-values to ' + 'extract\nand the current b-value. [%(default)s]') + p.add_argument( + '--roi_radii', default=[10], nargs='+', type=int, + help='If supplied, use those radii to select a cuboid roi ' + 'to estimate the response functions. The roi will be ' + 'a cuboid spanning from the middle of the volume in ' + 'each direction with the different radii. The type is ' + 'either an int or an array-like (3,). 
[%(default)s]') + p.add_argument( + '--roi_center', metavar='tuple(3)', nargs=3, type=int, + help='If supplied, use this center to span the cuboid roi ' + 'using roi_radii. [center of the 3D volume]') + + p.add_argument( + '--wm_frf_mask', metavar='file', default='', + help='Path to the output WM frf mask file, the voxels used ' + 'to compute the WM frf.') + p.add_argument( + '--gm_frf_mask', metavar='file', default='', + help='Path to the output GM frf mask file, the voxels used ' + 'to compute the GM frf.') + p.add_argument( + '--csf_frf_mask', metavar='file', default='', + help='Path to the output CSF frf mask file, the voxels used ' + 'to compute the CSF frf.') + + p.add_argument( + '--frf_table', metavar='file', default='', + help='Path to the output frf table file. Saves the frf for ' + 'each b-value, in .txt format.') + + add_force_b0_arg(p) + add_overwrite_arg(p) + add_verbose_arg(p) + + return p + + +def main(): + + parser = buildArgsParser() + args = parser.parse_args() + + if args.verbose: + logging.basicConfig(level=logging.DEBUG) + else: + logging.basicConfig(level=logging.INFO) + + assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs]) + assert_outputs_exist(parser, args, [args.wm_frf_file, args.gm_frf_file, + args.csf_frf_file]) + + if len(args.roi_radii) == 1: + roi_radii = args.roi_radii[0] + elif len(args.roi_radii) == 2: + parser.error('--roi_radii cannot be of size (2,).') + else: + roi_radii = args.roi_radii + roi_center = args.roi_center + + vol = nib.load(args.input) + data = vol.get_fdata(dtype=np.float32) + bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs) + + tol = args.tolerance + + list_bvals = unique_bvals_tolerance(bvals, tol=tol) + if not np.all(list_bvals <= 1200): + outputs = extract_dwi_shell(vol, bvals, bvecs, + list_bvals[list_bvals <= 1200], + tol=tol) + _, data_dti, bvals_dti, bvecs_dti = outputs + bvals_dti = np.squeeze(bvals_dti) + else: + data_dti = None + bvals_dti = None + bvecs_dti = None + + mask = None + if args.mask is not None: + mask = get_data_as_mask(nib.load(args.mask), dtype=bool) + if mask.shape != data.shape[:-1]: + raise ValueError("Mask is not the same shape as data.") + mask_wm = None + mask_gm = None + mask_csf = None + if args.mask_wm: + mask_wm = get_data_as_mask(nib.load(args.mask_wm), dtype=bool) + if args.mask_gm: + mask_gm = get_data_as_mask(nib.load(args.mask_gm), dtype=bool) + if args.mask_csf: + mask_csf = get_data_as_mask(nib.load(args.mask_csf), dtype=bool) + + force_b0_thr = args.force_b0_threshold + responses, frf_masks = compute_msmt_frf(data, bvals, bvecs, + data_dti=data_dti, + bvals_dti=bvals_dti, + bvecs_dti=bvecs_dti, + mask=mask, mask_wm=mask_wm, + mask_gm=mask_gm, mask_csf=mask_csf, + fa_thr_wm=args.fa_thr_wm, + fa_thr_gm=args.fa_thr_gm, + fa_thr_csf=args.fa_thr_csf, + md_thr_gm=args.md_thr_gm, + md_thr_csf=args.md_thr_csf, + min_nvox=args.min_nvox, + roi_radii=roi_radii, + roi_center=roi_center, + tol=tol, + force_b0_threshold=force_b0_thr) + + masks_files = [args.wm_frf_mask, args.gm_frf_mask, args.csf_frf_mask] + for mask, mask_file in zip(frf_masks, masks_files): + if mask_file: + nib.save(nib.Nifti1Image(mask.astype(np.uint16), vol.get_affine()), + mask_file) + + frf_out = [args.wm_frf_file, args.gm_frf_file, args.csf_frf_file] + + for frf, response in zip(frf_out, responses): + np.savetxt(frf, response) + + if args.frf_table: + if list_bvals[0] < tol: + bvals = list_bvals[1:] + else: + bvals = list_bvals + response_csf = responses[2] + response_gm = responses[1] + response_wm = 
responses[0] + iso_responses = np.concatenate((response_csf[:, :3], + response_gm[:, :3]), axis=1) + responses = np.concatenate((iso_responses, response_wm[:, :3]), axis=1) + frf_table = np.vstack((bvals, responses.T)).T + np.savetxt(args.frf_table, frf_table) + + +if __name__ == "__main__": + main() From 96c9fab51d19ec7f242bf312da44bc297a6e07a6 Mon Sep 17 00:00:00 2001 From: AntoineTheb Date: Sat, 19 Sep 2020 11:01:55 -0400 Subject: [PATCH 34/40] FIX: Pep8 --- scilpy/reconst/frf.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scilpy/reconst/frf.py b/scilpy/reconst/frf.py index 56b0e3ae0..13c9cba27 100644 --- a/scilpy/reconst/frf.py +++ b/scilpy/reconst/frf.py @@ -235,7 +235,11 @@ def compute_msmt_frf(data, bvals, bvecs, data_dti=None, bvals_dti=None, csf_fa_thr=fa_thr_csf, gm_md_thr=md_thr_gm, csf_md_thr=md_thr_csf) - elif data_dti is not None and bvals_dti is not None and bvecs_dti is not None: + elif ( + data_dti is not None and + bvals_dti is not None and + bvecs_dti is not None + ): if not is_normalized_bvecs(bvecs_dti): logging.warning('Your b-vectors do not seem normalized...') bvecs_dti = normalize_bvecs(bvecs_dti) From cfa0523ffe95e9f8f82e8adae739c4c02affd80b Mon Sep 17 00:00:00 2001 From: AntoineTheb Date: Sat, 19 Sep 2020 11:28:49 -0400 Subject: [PATCH 35/40] ENH: Add proper files and revert wrong files --- scilpy/reconst/frf.py | 189 +------------- scripts/scil_compute_msmt_fodf.py | 206 --------------- scripts/scil_compute_msmt_frf.py | 243 ------------------ ...pute_fodf.py => scil_compute_ssst_fodf.py} | 0 scripts/scil_compute_ssst_frf.py | 19 +- 5 files changed, 27 insertions(+), 630 deletions(-) delete mode 100644 scripts/scil_compute_msmt_fodf.py delete mode 100644 scripts/scil_compute_msmt_frf.py rename scripts/{scil_compute_fodf.py => scil_compute_ssst_fodf.py} (100%) diff --git a/scilpy/reconst/frf.py b/scilpy/reconst/frf.py index 13c9cba27..a9f4e2450 100644 --- a/scilpy/reconst/frf.py +++ b/scilpy/reconst/frf.py @@ -3,8 +3,8 @@ import logging from dipy.core.gradients import gradient_table -from dipy.reconst.csdeconv import auto_response -from dipy.reconst.mcsd import mask_for_response_msmt, response_from_mask_msmt +from dipy.reconst.csdeconv import (mask_for_response_ssst, + response_from_mask_ssst) from dipy.segment.mask import applymask import numpy as np @@ -14,7 +14,7 @@ def compute_ssst_frf(data, bvals, bvecs, mask=None, mask_wm=None, fa_thresh=0.7, min_fa_thresh=0.5, min_nvox=300, - roi_radius=10, roi_center=None, force_b0_threshold=False): + roi_radii=10, roi_center=None, force_b0_threshold=False): """Compute a single-shell (under b=1500), single-tissue single Fiber Response Function from a DWI volume. A DTI fit is made, and voxels containing a single fiber population are @@ -47,10 +47,10 @@ def compute_ssst_frf(data, bvals, bvecs, mask=None, mask_wm=None, min_nvox : int, optional Minimal number of voxels needing to be identified as single fiber voxels in the automatic estimation. Defaults to 300. - roi_radius : int, optional - Use this radius to select single fibers from the tensor to estimate - the FRF. The roi will be a cube spanning from the middle of the volume - in each direction. Defaults to 10. + roi_radii : int or array-like (3,), optional + Use those radii to select a cuboid roi to estimate the FRF. The roi + will be a cuboid spanning from the middle of the volume in each + direction with the different radii. Defaults to 10. 
roi_center : tuple(3), optional Use this center to span the roi of size roi_radius (center of the 3D volume). @@ -99,11 +99,12 @@ def compute_ssst_frf(data, bvals, bvecs, mask=None, mask_wm=None, # We use an epsilon since the -= 0.05 might incur numerical imprecision. nvox = 0 while nvox < min_nvox and fa_thresh >= min_fa_thresh - 0.00001: - response, ratio, nvox = auto_response(gtab, data, - roi_center=roi_center, - roi_radius=roi_radius, - fa_thr=fa_thresh, - return_number_of_voxels=True) + mask = mask_for_response_ssst(gtab, data, + roi_center=roi_center, + roi_radii=roi_radii, + fa_thr=fa_thresh) + nvox = np.sum(mask) + response, ratio = response_from_mask_ssst(gtab, data, mask) logging.debug( "Number of indices is {:d} with threshold of {:.2f}".format( @@ -128,167 +129,3 @@ def compute_ssst_frf(data, bvals, bvecs, mask=None, mask_wm=None, response[0][2], response[1]]) return full_response - - -def compute_msmt_frf(data, bvals, bvecs, data_dti=None, bvals_dti=None, - bvecs_dti=None, mask=None, mask_wm=None, mask_gm=None, - mask_csf=None, fa_thr_wm=0.7, fa_thr_gm=0.2, - fa_thr_csf=0.1, md_thr_gm=0.0007, md_thr_csf=0.003, - min_nvox=300, roi_radii=10, roi_center=None, - tol=20, force_b0_threshold=False): - """Compute a single-shell (under b=1500), single-tissue single Fiber - Response Function from a DWI volume. - A DTI fit is made, and voxels containing a single fiber population are - found using a threshold on the FA. - - Parameters - ---------- - data : ndarray - 4D Input diffusion volume with shape (X, Y, Z, N) - bvals : ndarray - 1D bvals array with shape (N,) - bvecs : ndarray - 2D bvecs array with shape (N, 3) - mask : ndarray, optional - 3D mask with shape (X,Y,Z) - Binary mask. Only the data inside the mask will be used for - computations and reconstruction. - mask_wm : ndarray, optional - 3D mask with shape (X,Y,Z) - Binary white matter mask. Only the data inside this mask will be used - to estimate the fiber response function of WM. - mask_gm : ndarray, optional - 3D mask with shape (X,Y,Z) - Binary grey matter mask. Only the data inside this mask will be used - to estimate the fiber response function of GM. - mask_csf : ndarray, optional - 3D mask with shape (X,Y,Z) - Binary csf mask. Only the data inside this mask will be used to - estimate the fiber response function of CSF. - fa_thr_wm : float, optional - Use this threshold to select single WM fiber voxels from the FA inside - the WM mask defined by mask_wm. Each voxel above this threshold will be - selected. Defaults to 0.7 - fa_thr_gm : float, optional - Use this threshold to select GM voxels from the FA inside the GM mask - defined by mask_gm. Each voxel below this threshold will be selected. - Defaults to 0.2 - fa_thr_csf : float, optional - Use this threshold to select CSF voxels from the FA inside the CSF mask - defined by mask_csf. Each voxel below this threshold will be selected. - Defaults to 0.1 - md_thr_gm : float, optional - Use this threshold to select GM voxels from the MD inside the GM mask - defined by mask_gm. Each voxel below this threshold will be selected. - Defaults to 0.0007 - md_thr_csf : float, optional - Use this threshold to select CSF voxels from the MD inside the CSF mask - defined by mask_csf. Each voxel below this threshold will be selected. - Defaults to 0.003 - min_nvox : int, optional - Minimal number of voxels needing to be identified as single fiber - voxels in the automatic estimation. Defaults to 300. 
- roi_radii : int or array-like (3,), optional - Use those radii to select a cuboid roi to estimate the FRF. The roi - will be a cuboid spanning from the middle of the volume in each - direction with the different radii. Defaults to 10. - roi_center : tuple(3), optional - Use this center to span the roi of size roi_radius (center of the - 3D volume). - tol : int - tolerance gap for b-values clustering. Defaults to 20 - force_b0_threshold : bool, optional - If set, will continue even if the minimum bvalue is suspiciously high. - - Returns - ------- - reponses : list of ndarray - Fiber Response Function of each (3) tissue type, with shape (4, N). - frf_masks : list of ndarray - Mask where the frf was calculated, for each (3) tissue type, with - shape (X, Y, Z). - - Raises - ------ - ValueError - If less than `min_nvox` voxels were found with sufficient FA to - estimate the FRF. - """ - if not is_normalized_bvecs(bvecs): - logging.warning('Your b-vectors do not seem normalized...') - bvecs = normalize_bvecs(bvecs) - - check_b0_threshold(force_b0_threshold, bvals.min()) - - gtab = gradient_table(bvals, bvecs) - - if data_dti is None and bvals_dti is None and bvecs_dti is None: - logging.warning( - "No data specific to DTI was given. If b-values go over 1200, " - "this might produce wrong results.") - wm_frf_mask, gm_frf_mask, csf_frf_mask \ - = mask_for_response_msmt(gtab, data, - roi_center=roi_center, - roi_radii=roi_radii, - wm_fa_thr=fa_thr_wm, - gm_fa_thr=fa_thr_gm, - csf_fa_thr=fa_thr_csf, - gm_md_thr=md_thr_gm, - csf_md_thr=md_thr_csf) - elif ( - data_dti is not None and - bvals_dti is not None and - bvecs_dti is not None - ): - if not is_normalized_bvecs(bvecs_dti): - logging.warning('Your b-vectors do not seem normalized...') - bvecs_dti = normalize_bvecs(bvecs_dti) - - check_b0_threshold(force_b0_threshold, bvals_dti.min()) - gtab_dti = gradient_table(bvals_dti, bvecs_dti) - - wm_frf_mask, gm_frf_mask, csf_frf_mask \ - = mask_for_response_msmt(gtab_dti, data_dti, - roi_center=roi_center, - roi_radii=roi_radii, - wm_fa_thr=fa_thr_wm, - gm_fa_thr=fa_thr_gm, - csf_fa_thr=fa_thr_csf, - gm_md_thr=md_thr_gm, - csf_md_thr=md_thr_csf) - else: - msg = """Input not valid. Either give no _dti input, or give all - data_dti, bvals_dti and bvecs_dti.""" - raise ValueError(msg) - - if mask is not None: - wm_frf_mask *= mask - gm_frf_mask *= mask - csf_frf_mask *= mask - if mask_wm is not None: - wm_frf_mask *= mask_wm - if mask_gm is not None: - gm_frf_mask *= mask_gm - if mask_csf is not None: - csf_frf_mask *= mask_csf - - msg = """Could not find at least {0} voxels for the {1} mask. Look at - previous warnings or be sure that external tissue masks overlap with the - cuboid ROI.""" - - if np.sum(wm_frf_mask) < min_nvox: - raise ValueError(msg.format(min_nvox, "WM")) - if np.sum(gm_frf_mask) < min_nvox: - raise ValueError(msg.format(min_nvox, "GM")) - if np.sum(csf_frf_mask) < min_nvox: - raise ValueError(msg.format(min_nvox, "CSF")) - - frf_masks = [wm_frf_mask, gm_frf_mask, csf_frf_mask] - - response_wm, response_gm, response_csf \ - = response_from_mask_msmt(gtab, data, wm_frf_mask, gm_frf_mask, - csf_frf_mask, tol=tol) - - responses = [response_wm, response_gm, response_csf] - - return responses, frf_masks diff --git a/scripts/scil_compute_msmt_fodf.py b/scripts/scil_compute_msmt_fodf.py deleted file mode 100644 index 68c9b2547..000000000 --- a/scripts/scil_compute_msmt_fodf.py +++ /dev/null @@ -1,206 +0,0 @@ -#! 
/usr/bin/env python -# -*- coding: utf-8 -*- - -""" -Script to compute Multishell Multi-tissue Constrained Spherical Deconvolution -ODFs. - -By default, will output all possible files, using default names. -Specific names can be specified using the file flags specified in the -"File flags" section. - -If --not_all is set, only the files specified explicitly by the flags -will be output. - -Based on B. Jeurissen et al., Multi-tissue constrained spherical -deconvolution for improved analysis of multi-shell diffusion -MRI data. Neuroimage (2014) -""" - -import argparse -import logging - -from dipy.core.gradients import gradient_table, unique_bvals_tolerance -from dipy.data import get_sphere -from dipy.io.gradients import read_bvals_bvecs -from dipy.reconst.mcsd import MultiShellDeconvModel, multi_shell_fiber_response -import nibabel as nib -import numpy as np - -from scilpy.io.image import get_data_as_mask -from scilpy.io.utils import (add_overwrite_arg, assert_inputs_exist, - assert_outputs_exist, add_force_b0_arg, - add_sh_basis_args, add_processes_arg) -from scilpy.reconst.multi_processes import fit_from_model, convert_sh_basis -from scilpy.utils.bvec_bval_tools import (check_b0_threshold, normalize_bvecs, - is_normalized_bvecs) - - -def _build_arg_parser(): - - p = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawTextHelpFormatter) - - p.add_argument('input', - help='Path of the input diffusion volume.') - p.add_argument('bvals', - help='Path of the bvals file, in FSL format.') - p.add_argument('bvecs', - help='Path of the bvecs file, in FSL format.') - p.add_argument('wm_frf_file', - help='Text file of WM response function.') - p.add_argument('gm_frf_file', - help='Text file of GM response function.') - p.add_argument('csf_frf_file', - help='Text file of CSF response function.') - - p.add_argument( - '--sh_order', metavar='int', default=8, type=int, - help='SH order used for the CSD. (Default: 8)') - p.add_argument( - '--mask', metavar='', - help='Path to a binary mask. Only the data inside the ' - 'mask will be used for computations and reconstruction.') - - add_force_b0_arg(p) - add_sh_basis_args(p) - add_processes_arg(p) - add_overwrite_arg(p) - - p.add_argument( - '--not_all', action='store_true', - help='If set, only saves the files specified using the ' - 'file flags. 
(Default: False)') - - g = p.add_argument_group(title='File flags') - - g.add_argument( - '--wm_out_fODF', metavar='file', default='', - help='Output filename for the WM fODF coefficients.') - g.add_argument( - '--gm_out_fODF', metavar='file', default='', - help='Output filename for the GM fODF coefficients.') - g.add_argument( - '--csf_out_fODF', metavar='file', default='', - help='Output filename for the CSF fODF coefficients.') - g.add_argument( - '--vf', metavar='file', default='', - help='Output filename for the volume fractions map.') - - return p - - -def main(): - parser = _build_arg_parser() - args = parser.parse_args() - logging.basicConfig(level=logging.INFO) - - if not args.not_all: - args.wm_out_fODF = args.wm_out_fODF or 'wm_fODF.nii.gz' - args.gm_out_fODF = args.gm_out_fODF or 'gm_fodf.nii.gz' - args.csf_out_fODF = args.csf_out_fODF or 'csf_fodf.nii.gz' - args.vf = args.vf or 'vf.nii.gz' - - arglist = [args.wm_out_fODF, args.gm_out_fODF, args.csf_out_fODF, args.vf] - if args.not_all and not any(arglist): - parser.error('When using --not_all, you need to specify at least ' + - 'one file to output.') - - assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs, - args.wm_frf_file, args.gm_frf_file, - args.csf_frf_file]) - assert_outputs_exist(parser, args, arglist) - - # Loading data - wm_frf = np.loadtxt(args.wm_frf_file) - gm_frf = np.loadtxt(args.gm_frf_file) - csf_frf = np.loadtxt(args.csf_frf_file) - vol = nib.load(args.input) - data = vol.get_fdata(dtype=np.float32) - bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs) - - # Checking mask - if args.mask is None: - mask = None - else: - mask = get_data_as_mask(nib.load(args.mask), dtype=bool) - if mask.shape != data.shape[:-1]: - raise ValueError("Mask is not the same shape as data.") - - sh_order = args.sh_order - - # Checking data and sh_order - check_b0_threshold(args.force_b0_threshold, bvals.min()) - if data.shape[-1] < (sh_order + 1) * (sh_order + 2) / 2: - logging.warning( - 'We recommend having at least {} unique DWIs volumes, but you ' - 'currently have {} volumes. Try lowering the parameter --sh_order ' - 'in case of non convergence.'.format( - (sh_order + 1) * (sh_order + 2) / 2, data.shape[-1])) - - # Checking bvals, bvecs values and loading gtab - if not is_normalized_bvecs(bvecs): - logging.warning('Your b-vectors do not seem normalized...') - bvecs = normalize_bvecs(bvecs) - gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min()) - - # Checking response functions and computing msmt response function - if not wm_frf.shape[1] == 4: - raise ValueError('WM frf file did not contain 4 elements. ' - 'Invalid or deprecated FRF format') - if not gm_frf.shape[1] == 4: - raise ValueError('GM frf file did not contain 4 elements. ' - 'Invalid or deprecated FRF format') - if not csf_frf.shape[1] == 4: - raise ValueError('CSF frf file did not contain 4 elements. 
' - 'Invalid or deprecated FRF format') - ubvals = unique_bvals_tolerance(bvals, tol=20) - msmt_response = multi_shell_fiber_response(sh_order, ubvals, - wm_frf, gm_frf, csf_frf) - - # Loading spheres - reg_sphere = get_sphere('symmetric362') - - # Computing msmt-CSD - msmt_model = MultiShellDeconvModel(gtab, msmt_response, - reg_sphere=reg_sphere, - sh_order=sh_order) - - # Computing msmt-CSD fit - msmt_fit = fit_from_model(msmt_model, data, - mask=mask, nbr_processes=args.nbr_processes) - - # Saving results - if args.wm_out_fODF: - shm_coeff = msmt_fit.shm_coeff - if args.sh_basis == 'tournier07': - shm_coeff = convert_sh_basis(shm_coeff, reg_sphere, mask=mask, - nbr_processes=args.nbr_processes) - nib.save(nib.Nifti1Image(shm_coeff.astype(np.float32), - vol.affine), args.wm_out_fODF) - - if args.gm_out_fODF: - shm_coeff = msmt_fit.all_shm_coeff[..., 1] - if args.sh_basis == 'tournier07': - shm_coeff = shm_coeff.reshape(shm_coeff.shape + (1,)) - shm_coeff = convert_sh_basis(shm_coeff, reg_sphere, mask=mask, - nbr_processes=args.nbr_processes) - nib.save(nib.Nifti1Image(shm_coeff.astype(np.float32), - vol.affine), args.gm_out_fODF) - - if args.csf_out_fODF: - shm_coeff = msmt_fit.all_shm_coeff[..., 0] - if args.sh_basis == 'tournier07': - shm_coeff = shm_coeff.reshape(shm_coeff.shape + (1,)) - shm_coeff = convert_sh_basis(shm_coeff, reg_sphere, mask=mask, - nbr_processes=args.nbr_processes) - nib.save(nib.Nifti1Image(shm_coeff.astype(np.float32), - vol.affine), args.csf_out_fODF) - - if args.vf: - nib.save(nib.Nifti1Image(msmt_fit.volume_fractions.astype(np.float32), - vol.affine), args.vf) - - -if __name__ == "__main__": - main() diff --git a/scripts/scil_compute_msmt_frf.py b/scripts/scil_compute_msmt_frf.py deleted file mode 100644 index f1ac2c140..000000000 --- a/scripts/scil_compute_msmt_frf.py +++ /dev/null @@ -1,243 +0,0 @@ -#! /usr/bin/env python -# -*- coding: utf-8 -*- - -""" -Compute response functions for multi-shell multi-tissue (MSMT) -constrained spherical deconvolution from DWI data. - -The script computes a response function for white-matter (wm), -gray-matter (gm), csf and the mean b=0. - -In the wm, we compute the response function in each voxels where -the FA is superior at threshold_fa_wm. - -In the gm (or csf), we compute the response function in each voxels where -the FA is below at threshold_fa_gm (or threshold_fa_csf) and where -the MD is below threshold_md_gm (or threshold_md_csf). - -Based on B. Jeurissen et al., Multi-tissue constrained spherical -deconvolution for improved analysis of multi-shell diffusion -MRI data. 
Neuroimage (2014) -""" - -import argparse -import logging - -from dipy.core.gradients import unique_bvals_tolerance -from dipy.io.gradients import read_bvals_bvecs -import nibabel as nib -import numpy as np - -from scilpy.io.image import get_data_as_mask -from scilpy.io.utils import (add_force_b0_arg, - add_overwrite_arg, add_verbose_arg, - assert_inputs_exist, assert_outputs_exist) -from scilpy.reconst.frf import compute_msmt_frf -from scilpy.utils.bvec_bval_tools import extract_dwi_shell - - -def buildArgsParser(): - - p = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawTextHelpFormatter) - - p.add_argument('input', - help='Path to the input diffusion volume.') - p.add_argument('bvals', - help='Path to the bvals file, in FSL format.') - p.add_argument('bvecs', - help='Path to the bvecs file, in FSL format.') - p.add_argument('wm_frf_file', - help='Path to the output WM frf file, in .txt format.') - p.add_argument('gm_frf_file', - help='Path to the output GM frf file, in .txt format.') - p.add_argument('csf_frf_file', - help='Path to the output CSF frf file, in .txt format.') - - p.add_argument( - '--mask', - help='Path to a binary mask. Only the data inside the mask will be ' - 'used for computations and reconstruction. Useful if no tissue ' - 'masks are available.') - p.add_argument( - '--mask_wm', - help='Path to the WM mask file.') - p.add_argument( - '--mask_gm', - help='Path to the GM mask file.') - p.add_argument( - '--mask_csf', - help='Path to the CSF mask file.') - - p.add_argument( - '--fa_thr_wm', default=0.7, type=float, - help='If supplied, use this threshold to select single WM fiber ' - 'voxels from the FA inside the WM mask defined by mask_wm. Each ' - 'voxel above this threshold will be selected. [%(default)s]') - p.add_argument( - '--fa_thr_gm', default=0.2, type=float, - help='If supplied, use this threshold to select GM voxels from the FA ' - 'inside the GM mask defined by mask_gm. Each voxel below this ' - 'threshold will be selected. [%(default)s]') - p.add_argument( - '--fa_thr_csf', default=0.1, type=float, - help='If supplied, use this threshold to select CSF voxels from the ' - 'FA inside the CSF mask defined by mask_csf. Each voxel below ' - 'this threshold will be selected. [%(default)s]') - p.add_argument( - '--md_thr_gm', default=0.0007, type=float, - help='If supplied, use this threshold to select GM voxels from the MD ' - 'inside the GM mask defined by mask_gm. Each voxel below this ' - 'threshold will be selected. [%(default)s]') - p.add_argument( - '--md_thr_csf', default=0.003, type=float, - help='If supplied, use this threshold to select CSF voxels from the ' - 'MD inside the CSF mask defined by mask_csf. Each voxel below ' - 'this threshold will be selected. [%(default)s]') - - p.add_argument( - '--min_nvox', default=100, type=int, - help='Minimal number of voxels needed for each tissue masks ' - 'in order to proceed to frf estimation. [%(default)s]') - p.add_argument( - '--tolerance', type=int, default=20, - help='The tolerated gap between the b-values to ' - 'extract\nand the current b-value. [%(default)s]') - p.add_argument( - '--roi_radii', default=[10], nargs='+', type=int, - help='If supplied, use those radii to select a cuboid roi ' - 'to estimate the response functions. The roi will be ' - 'a cuboid spanning from the middle of the volume in ' - 'each direction with the different radii. The type is ' - 'either an int or an array-like (3,). 
[%(default)s]') - p.add_argument( - '--roi_center', metavar='tuple(3)', nargs=3, type=int, - help='If supplied, use this center to span the cuboid roi ' - 'using roi_radii. [center of the 3D volume]') - - p.add_argument( - '--wm_frf_mask', metavar='file', default='', - help='Path to the output WM frf mask file, the voxels used ' - 'to compute the WM frf.') - p.add_argument( - '--gm_frf_mask', metavar='file', default='', - help='Path to the output GM frf mask file, the voxels used ' - 'to compute the GM frf.') - p.add_argument( - '--csf_frf_mask', metavar='file', default='', - help='Path to the output CSF frf mask file, the voxels used ' - 'to compute the CSF frf.') - - p.add_argument( - '--frf_table', metavar='file', default='', - help='Path to the output frf table file. Saves the frf for ' - 'each b-value, in .txt format.') - - add_force_b0_arg(p) - add_overwrite_arg(p) - add_verbose_arg(p) - - return p - - -def main(): - - parser = buildArgsParser() - args = parser.parse_args() - - if args.verbose: - logging.basicConfig(level=logging.DEBUG) - else: - logging.basicConfig(level=logging.INFO) - - assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs]) - assert_outputs_exist(parser, args, [args.wm_frf_file, args.gm_frf_file, - args.csf_frf_file]) - - if len(args.roi_radii) == 1: - roi_radii = args.roi_radii[0] - elif len(args.roi_radii) == 2: - parser.error('--roi_radii cannot be of size (2,).') - else: - roi_radii = args.roi_radii - roi_center = args.roi_center - - vol = nib.load(args.input) - data = vol.get_fdata(dtype=np.float32) - bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs) - - tol = args.tolerance - - list_bvals = unique_bvals_tolerance(bvals, tol=tol) - if not np.all(list_bvals <= 1200): - outputs = extract_dwi_shell(vol, bvals, bvecs, - list_bvals[list_bvals <= 1200], - tol=tol) - _, data_dti, bvals_dti, bvecs_dti = outputs - bvals_dti = np.squeeze(bvals_dti) - else: - data_dti = None - bvals_dti = None - bvecs_dti = None - - mask = None - if args.mask is not None: - mask = get_data_as_mask(nib.load(args.mask), dtype=bool) - if mask.shape != data.shape[:-1]: - raise ValueError("Mask is not the same shape as data.") - mask_wm = None - mask_gm = None - mask_csf = None - if args.mask_wm: - mask_wm = get_data_as_mask(nib.load(args.mask_wm), dtype=bool) - if args.mask_gm: - mask_gm = get_data_as_mask(nib.load(args.mask_gm), dtype=bool) - if args.mask_csf: - mask_csf = get_data_as_mask(nib.load(args.mask_csf), dtype=bool) - - force_b0_thr = args.force_b0_threshold - responses, frf_masks = compute_msmt_frf(data, bvals, bvecs, - data_dti=data_dti, - bvals_dti=bvals_dti, - bvecs_dti=bvecs_dti, - mask=mask, mask_wm=mask_wm, - mask_gm=mask_gm, mask_csf=mask_csf, - fa_thr_wm=args.fa_thr_wm, - fa_thr_gm=args.fa_thr_gm, - fa_thr_csf=args.fa_thr_csf, - md_thr_gm=args.md_thr_gm, - md_thr_csf=args.md_thr_csf, - min_nvox=args.min_nvox, - roi_radii=roi_radii, - roi_center=roi_center, - tol=tol, - force_b0_threshold=force_b0_thr) - - masks_files = [args.wm_frf_mask, args.gm_frf_mask, args.csf_frf_mask] - for mask, mask_file in zip(frf_masks, masks_files): - if mask_file: - nib.save(nib.Nifti1Image(mask.astype(np.uint16), vol.get_affine()), - mask_file) - - frf_out = [args.wm_frf_file, args.gm_frf_file, args.csf_frf_file] - - for frf, response in zip(frf_out, responses): - np.savetxt(frf, response) - - if args.frf_table: - if list_bvals[0] < tol: - bvals = list_bvals[1:] - else: - bvals = list_bvals - response_csf = responses[2] - response_gm = responses[1] - response_wm = 
responses[0] - iso_responses = np.concatenate((response_csf[:, :3], - response_gm[:, :3]), axis=1) - responses = np.concatenate((iso_responses, response_wm[:, :3]), axis=1) - frf_table = np.vstack((bvals, responses.T)).T - np.savetxt(args.frf_table, frf_table) - - -if __name__ == "__main__": - main() diff --git a/scripts/scil_compute_fodf.py b/scripts/scil_compute_ssst_fodf.py similarity index 100% rename from scripts/scil_compute_fodf.py rename to scripts/scil_compute_ssst_fodf.py diff --git a/scripts/scil_compute_ssst_frf.py b/scripts/scil_compute_ssst_frf.py index 3df157412..64245bd00 100755 --- a/scripts/scil_compute_ssst_frf.py +++ b/scripts/scil_compute_ssst_frf.py @@ -65,10 +65,12 @@ def _build_arg_parser(): 'fiber voxels in the automatic estimation. [%(default)s]') p.add_argument( - '--roi_radius', default=10, type=int, - help='If supplied, use this radius to select single fibers from the ' - 'tensor to estimate the FRF. The roi will be a cube spanning ' - 'from the middle of the volume in each direction. [%(default)s]') + '--roi_radii', default=[10], nargs='+', type=int, + help='If supplied, use those radii to select a cuboid roi ' + 'to estimate the response functions. The roi will be ' + 'a cuboid spanning from the middle of the volume in ' + 'each direction with the different radii. The type is ' + 'either an int or an array-like (3,). [%(default)s]') p.add_argument( '--roi_center', metavar='tuple(3)', help='If supplied, use this center to span the roi of size ' @@ -92,6 +94,13 @@ def main(): assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs]) assert_outputs_exist(parser, args, args.frf_file) + if len(args.roi_radii) == 1: + roi_radii = args.roi_radii[0] + elif len(args.roi_radii) == 2: + parser.error('--roi_radii cannot be of size (2,).') + else: + roi_radii = args.roi_radii + vol = nib.load(args.input) data = vol.get_fdata(dtype=np.float32) @@ -109,7 +118,7 @@ def main(): mask_wm=mask_wm, fa_thresh=args.fa_thresh, min_fa_thresh=args.min_fa_thresh, min_nvox=args.min_nvox, - roi_radius=args.roi_radius, + roi_radii=roi_radii, roi_center=args.roi_center, force_b0_threshold=args.force_b0_threshold) From 23c82cdcc4789fc2c431d3950a628e3701644b6a Mon Sep 17 00:00:00 2001 From: AntoineTheb Date: Tue, 22 Sep 2020 10:07:23 -0400 Subject: [PATCH 36/40] FIX: Update dipy version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 871333f76..becc14a88 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ bz2file==0.98.* coloredlogs==10.0.* cycler==0.10.* Cython==0.29.* -dipy>=1.1.2 +dipy==1.2.* fury==0.2.* future==0.17.* h5py==2.9.* From f5c6730e1b644b53f93acc8bb4f46032cbaf5430 Mon Sep 17 00:00:00 2001 From: AntoineTheb Date: Tue, 22 Sep 2020 11:42:37 -0400 Subject: [PATCH 37/40] FIX: Fix Python3.8 yield-in-generator deprecation and move test --- .gitignore | 2 ++ scripts/scil_compress_streamlines.py | 4 ++-- .../{test_compute_fodf.py => test_compute_ssst_fodf.py} | 6 +++--- 3 files changed, 7 insertions(+), 5 deletions(-) rename scripts/tests/{test_compute_fodf.py => test_compute_ssst_fodf.py} (81%) diff --git a/.gitignore b/.gitignore index 861d1b77f..b27f4fa93 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,8 @@ *.pyo *.sublime* *.idea* +*.swp +*.swo # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/scripts/scil_compress_streamlines.py b/scripts/scil_compress_streamlines.py index d6e6d539f..adcf2e177 100755 --- a/scripts/scil_compress_streamlines.py +++ 
b/scripts/scil_compress_streamlines.py @@ -38,8 +38,8 @@ def _build_arg_parser(): def compress_streamlines_wrapper(tractogram, error_rate): - return lambda: [(yield compress_streamlines( - s, error_rate)) for s in tractogram.streamlines] + return lambda: (compress_streamlines( + s, error_rate) for s in tractogram.streamlines) def main(): diff --git a/scripts/tests/test_compute_fodf.py b/scripts/tests/test_compute_ssst_fodf.py similarity index 81% rename from scripts/tests/test_compute_fodf.py rename to scripts/tests/test_compute_ssst_fodf.py index 5211d30a5..3e19a7cf0 100644 --- a/scripts/tests/test_compute_fodf.py +++ b/scripts/tests/test_compute_ssst_fodf.py @@ -12,7 +12,7 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_compute_fodf.py', '--help') + ret = script_runner.run('scil_compute_ssst_fodf.py', '--help') assert ret.success @@ -26,7 +26,7 @@ def test_execution_processing(script_runner): '3000.bvec') in_frf = os.path.join(get_home(), 'processing', 'frf.txt') - ret = script_runner.run('scil_compute_fodf.py', in_dwi, in_bval, in_bvec, - in_frf, 'fodf.nii.gz', '--sh_order', '4', + ret = script_runner.run('scil_compute_ssst_fodf.py', in_dwi, in_bval, + in_bvec, in_frf, 'fodf.nii.gz', '--sh_order', '4', '--sh_basis', 'tournier07', '--processes', '1') assert ret.success From 9119ce407faf305371a17e670dc08485165dcd6d Mon Sep 17 00:00:00 2001 From: Guillaume Theaud Date: Tue, 22 Sep 2020 13:24:34 -0400 Subject: [PATCH 38/40] Reduce tests --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ac4e41479..c1b027418 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,7 +17,8 @@ stage: Tests script: - python setup.py develop - export MPLBACKEND="agg" - - pytest -v + - pytest scripts/tests/test_compute_bundle_profiles.py + - pytest scripts/tests/test_compute_bundle_profiles.py jobs: From faa5e7e19cd5614eb6dbce7ba1ba714f63d777f0 Mon Sep 17 00:00:00 2001 From: Guillaume Theaud Date: Tue, 22 Sep 2020 13:52:08 -0400 Subject: [PATCH 39/40] Change encoding --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index c1b027418..0802ccdbf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,8 +17,8 @@ stage: Tests script: - python setup.py develop - export MPLBACKEND="agg" - - pytest scripts/tests/test_compute_bundle_profiles.py - - pytest scripts/tests/test_compute_bundle_profiles.py + - export PYTHONIOENCODING=utf8 + - pytest -v jobs: From d4c809b200ac15b0a7ec7994b6a5d6201225aa5b Mon Sep 17 00:00:00 2001 From: Guillaume Theaud Date: Tue, 22 Sep 2020 14:18:38 -0400 Subject: [PATCH 40/40] Fix --- .travis.yml | 1 - requirements.txt | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0802ccdbf..ac4e41479 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,7 +17,6 @@ stage: Tests script: - python setup.py develop - export MPLBACKEND="agg" - - export PYTHONIOENCODING=utf8 - pytest -v diff --git a/requirements.txt b/requirements.txt index becc14a88..c6cb71285 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,7 +24,7 @@ vtk==9.0.1 trimeshpy==0.0.* coloredlogs==10.0.* nilearn==0.6.* -pytest==5.3.* +pytest==6.0.* pytest_console_scripts==0.2.* googledrivedownloader==0.* requests==2.23.*
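On the --eddy_options/--topup_options pattern from patches 25-26: a minimal, self-contained argparse sketch of how a quoted option string reaches the script as a single value and can be appended to the external command. The base command list ('eddy_openmp', '--imain=dwi.nii.gz') is a hypothetical placeholder, not the scripts' actual command construction.

import argparse

p = argparse.ArgumentParser()
p.add_argument('--eddy_options', default=None,
               help='Additional options you want to use to run eddy.\n'
                    'Add these options using quotes (e.g. "--ol_nstd=6'
                    ' --mb=4").')

# The quoted string arrives as one argparse value...
args = p.parse_args(['--eddy_options= --ol_nstd=6 --mb=4'])

# ...and can be split into individual eddy flags.
extra = args.eddy_options.split() if args.eddy_options else []
command = ['eddy_openmp', '--imain=dwi.nii.gz'] + extra  # placeholder base command
print(' '.join(command))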
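Patch 28 replaces the json wording in scil_visualize_connectivity.py with text files. A short sketch of what a two-row --reorder_txt file could contain; the label values are made up for illustration.

import numpy as np

# First row: ordering of the matrix rows (x); second row: ordering of the
# columns (y). Values are hypothetical label indices.
order = np.array([[3, 0, 2, 1],
                  [3, 0, 2, 1]])
np.savetxt('reorder.txt', order, fmt='%d')
print(open('reorder.txt').read())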
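Patches 29-30 tighten the operand rules of mean/std in scilpy/image/operations.py. A standalone numpy sketch of the two combinations that remain valid; these are plain numpy calls on synthetic arrays, not the scilpy wrappers.

import numpy as np

# One 4D operand: statistics are taken along the last (volume) axis.
vol_4d = np.random.rand(5, 5, 5, 8)
print(np.average(vol_4d, axis=-1).shape)  # (5, 5, 5)
print(np.std(vol_4d, axis=-1).shape)      # (5, 5, 5)

# Several 3D operands: voxel-wise statistics across the inputs.
vols_3d = [np.random.rand(5, 5, 5) for _ in range(4)]
print(np.average(np.stack(vols_3d), axis=0).shape)  # (5, 5, 5)

# A single 3D operand, or more than one 4D operand, is what the patched
# functions now reject with a ValueError.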
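The two scripts added in patch 33 (and moved out again in patch 35) are meant to be chained: the frf script writes the three response-function text files the fodf script consumes. A hypothetical invocation, assuming the scripts are installed on PATH; all file names are placeholders.

import subprocess

# 1) Estimate the WM/GM/CSF response functions from a multi-shell DWI.
subprocess.run(['scil_compute_msmt_frf.py', 'dwi.nii.gz', 'dwi.bval',
                'dwi.bvec', 'wm_frf.txt', 'gm_frf.txt', 'csf_frf.txt',
                '--mask', 'brain_mask.nii.gz'], check=True)

# 2) Deconvolve the tissue fODFs from those response functions.
subprocess.run(['scil_compute_msmt_fodf.py', 'dwi.nii.gz', 'dwi.bval',
                'dwi.bvec', 'wm_frf.txt', 'gm_frf.txt', 'csf_frf.txt',
                '--wm_out_fODF', 'wm_fodf.nii.gz', '--vf', 'vf.nii.gz'],
               check=True)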
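Patch 35 also changes scil_compute_ssst_frf.py from a single --roi_radius to --roi_radii, accepting either one radius or three per-axis radii. A short sketch of the same parsing rule in isolation, mirroring the logic in the script's main().

def parse_roi_radii(values):
    # One value: isotropic cuboid radius; three values: per-axis radii;
    # exactly two values are rejected, as in the script.
    if len(values) == 1:
        return values[0]
    if len(values) == 2:
        raise ValueError('--roi_radii cannot be of size (2,).')
    return values

print(parse_roi_radii([10]))         # 10
print(parse_roi_radii([5, 10, 15]))  # [5, 10, 15]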
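Patch 37's change in scil_compress_streamlines.py works around yield inside comprehensions becoming a SyntaxError in Python 3.8. A self-contained sketch of the before/after pattern; compress() here is a dummy stand-in for the real compression call.

def compress(streamline, error_rate):
    return streamline[::2]  # dummy stand-in, keeps every other point

streamlines = [[0, 1, 2, 3], [4, 5, 6, 7]]

# Old form, a SyntaxError on Python 3.8+:
#   lambda: [(yield compress(s, 0.1)) for s in streamlines]
# New form: a plain generator expression wrapped in a lambda.
make_generator = lambda: (compress(s, 0.1) for s in streamlines)
print(list(make_generator()))  # [[0, 2], [4, 6]]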