import os
import math
from copy import copy

import nibabel as nib
import numpy as np
from nipype.interfaces.ants.segmentation import N4BiasFieldCorrection
from sklearn.metrics import mutual_info_score
# Make a directory (or a list of directories) if not already present.
def make_dirs(paths):
    if isinstance(paths, str):
        if not os.path.exists(paths):
            os.makedirs(paths)
            print(paths + ' directory made.')
    elif isinstance(paths, list):
        for path in paths:
            if not os.path.exists(path):
                os.makedirs(path)
                print(path + ' directory made.')
    else:
        print('The given path was neither a single path nor a list of paths.')
# Get a sorted list of paths of files in a directory with a specific file type.
def files_in_dir(path, file_type=''):
    path_list = []
    for file in os.listdir(path):
        if file.startswith('.'):
            continue
        if file.endswith(file_type):
            path_list.append(path + file)
    return sorted(path_list)
# Get a sorted list of paths of files in a directory whose names do not contain exclusion_string.
def files_ind_dir_wo_string(path, exclusion_string):
    path_list = []
    for file in os.listdir(path):
        if exclusion_string not in file:
            path_list.append(path + file)
    return sorted(path_list)
# Get a sorted list of images with a specific file type in sub-folders. Also returns the sorted list of folders.
def images_in_folders(path, file_type=''):
    path_list = []
    folder_list = []
    for folder in os.listdir(path):
        folder_list.append(path + folder)
        for file in os.listdir(path + folder + '/'):
            if file.startswith('.'):
                continue
            if file.endswith(file_type):
                path_list.append(path + folder + '/' + file)
    return sorted(path_list), sorted(folder_list)  # sorted added by FFS 26/6/2019.
# Make a .nii image from raw data. Applies specific pixel dimensions, if given.
def make_nii(data, pixdim=[]):
    nii = nib.Nifti1Image(data, np.eye(4))
    if len(pixdim) != 0:
        hdr = nii.header
        hdr['pixdim'] = pixdim
        nii = nib.Nifti1Pair(data, None, hdr)
    return nii
# Make a volume from a list of consecutive images.
def make_volume(images, output):
    init_image = nib.load(images[0])
    width, height = init_image.shape
    depth = len(images)
    init_pixdim = init_image.header['pixdim']
    volume_array = np.zeros((width, depth, height))
    for i, image in enumerate(images):
        image_data = np.rot90(nib.load(image).get_data(), 2)
        volume_array[:, depth - i - 1, :] = image_data
    nib.save(make_nii(volume_array, [1., init_pixdim[1], 0.100, init_pixdim[2], 1., 1., 1., 1.]), output)
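# Usage sketch (illustrative only; the paths below are hypothetical and all slice
# images are assumed to share the same in-plane shape):
#
#   slice_paths = files_in_dir('/data/subject01/slices/', file_type='.nii')
#   make_volume(slice_paths, '/data/subject01/volume.nii')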
# Get parent directory.
def get_parent_dir(path):
    folders = path.split('/')
    if folders[-1] == '':
        parent_dir = '/'.join(folders[0:-2]) + '/'
    else:
        parent_dir = '/'.join(folders[0:-1]) + '/'
    return parent_dir
# Reorient MR.
def reorient_mr(MR, result, inverted=False):
    original = nib.load(MR)
    original_data = original.get_data()
    pixdim = original.header['pixdim']
    if inverted:
        reoriented_data = np.fliplr(np.rot90(original_data, 1, axes=(1, 2)))
    else:
        reoriented_data = np.rot90(original_data, 1, axes=(1, 2))
    nib.save(make_nii(reoriented_data, [1., pixdim[1], pixdim[3], pixdim[2], 1., 1., 1., 1.]), result)
# Reorient atlas.
def reorient_atlas(atlas, result):
    original = nib.load(atlas)
    original_data = original.get_data()
    pixdim = original.header['pixdim']
    print(original_data.shape)
    reoriented_data = np.reshape(original_data, (456, 528, 320), order='A')
    print(reoriented_data.shape)
    nib.save(make_nii(reoriented_data, [1., pixdim[1], pixdim[3], pixdim[2], 1., 1., 1., 1.]), result)
# Slice a volume along axis 1 into single images named Template_Sxx.nii.
def slice_volume(volume, slices_path):
    current_mr = nib.load(volume)
    current_data = current_mr.get_data()
    number_of_slices = current_data.shape[1]
    pixdim = current_mr.header['pixdim']
    for i in range(number_of_slices):
        image_nr = i + 1
        image_nr_beauty = '0' + str(image_nr) if image_nr < 10 else str(image_nr)
        nib.save(make_nii(np.rot90(current_data[:, number_of_slices - image_nr, :], 2),
                          [1., pixdim[1], pixdim[3], pixdim[2], 1., 1., 1., 1.]),
                 slices_path + 'Template_S' + image_nr_beauty + '.nii')
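# Usage sketch (hypothetical paths; the slices folder should exist, e.g. created with make_dirs):
#
#   make_dirs('/data/template/slices/')
#   slice_volume('/data/template/volume.nii', '/data/template/slices/')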
# Run ANTs N4 bias field correction on an image via nipype.
def bias_field_correction(image, output, dim=2):
    N4BiasFieldCorrection(
        input_image=image,
        output_image=output,
        dimension=dim
    ).run()
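# Usage sketch (hypothetical paths; assumes ANTs and nipype are installed so the
# N4BiasFieldCorrection interface can actually run):
#
#   bias_field_correction('/data/subject01/slice_01.nii', '/data/subject01/slice_01_n4.nii', dim=2)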
# Flip a volume left-right (along the first axis), keeping the original header.
def flip_volume_lr(image, output):
    nii = nib.load(image)
    hdr = nii.header
    data = nii.get_data()
    flipped = np.flip(data, axis=0)
    nib.save(nib.Nifti1Pair(flipped, None, hdr), output)
# Append a trailing slash to a path if it is missing.
def add_slash_to_path(path):
    return path + '/' if not path[-1] == '/' else path
# Write an ITK identity transform file (2D or 3D) to work_dir and return its path.
def identity_transform(work_dir, dimensions):
    path = work_dir + 'identity_transform.txt'
    with open(path, 'w+') as transform_file:
        if dimensions == 3:
            transform_file.write('Transform: MatrixOffsetTransformBase_double_3_3'
                                 '\nParameters: 1 0 0 0 1 0 0 0 1 0 0 0'
                                 '\nFixedParameters: 0 0 0\n')
        elif dimensions == 2:
            transform_file.write('Transform: MatrixOffsetTransformBase_double_2_2'
                                 '\nParameters: 1 0 0 1 0 0'
                                 '\nFixedParameters: 0 0\n')
        else:
            print('Helper function identity_transform only supports 2D and 3D.')
    return path
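# Usage sketch (hypothetical working directory; the resulting text file can serve as
# an identity initialisation for ANTs tools, e.g. passed with -t to antsApplyTransforms):
#
#   transform_path = identity_transform('/data/work/', dimensions=3)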
# Convert an int to a two-character string: (1, 7, 13) --> ('01', '07', '13').
def pretty(i):
    return '0' + str(i) if i < 10 else str(i)
# Returns True if the given path exists.
def path_exists(path):
    return os.path.exists(path)
# Voxel-wise robust average of a list of volumes: values further than two standard
# deviations from the mean are discarded before taking the median across volumes.
def robust_avg_volumes(volumes, output):
    # Determine image matrix dimensions.
    depth_list = []
    init_nii = nib.load(volumes[0])
    pix_dim = init_nii.header['pixdim']
    a, b, c = init_nii.shape
    d = len(volumes)
    for volume_shape in volumes:
        depth_list.append(nib.load(volume_shape).shape[1])
    b = max(depth_list)
    matrix_shape = (a, b, c, d)
    images_matrix = np.zeros(matrix_shape)
    print('Average:')
    for i, volume in enumerate(volumes):
        print('Adding', volume)
        nii = nib.load(volume)
        images_matrix[:, 0:nii.shape[1], :, i] = nii.get_data()
    # Calculate mean and std.
    images_mean = np.mean(images_matrix, axis=3)
    images_std = images_matrix.std(3)
    images_std_scaled = images_std * 2
    # Duplicate mean and std to full dimensions for easy matching.
    mean_matrix = np.repeat(images_mean[:, :, :, np.newaxis], d, axis=3)
    scaled_std_matrix = np.repeat(images_std_scaled[:, :, :, np.newaxis], d, axis=3)
    # Mask out voxels deviating more than two standard deviations from the mean.
    robust_matrix = np.where(
        (images_matrix < mean_matrix - scaled_std_matrix) | (images_matrix > mean_matrix + scaled_std_matrix),
        np.nan,
        images_matrix)
    robust_mean = np.nanmedian(robust_matrix, axis=3)
    nib.save(make_nii(robust_mean, pixdim=pix_dim), output)
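# Usage sketch (hypothetical paths; assumes all volumes share width and height, while
# shorter volumes are zero-padded along axis 1 before averaging):
#
#   volumes = files_in_dir('/data/registered/', file_type='.nii')
#   robust_avg_volumes(volumes, '/data/template/robust_avg.nii')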
# Split each image into parts of at most max_depth slices along axis 1. Returns the
# list of part paths (grouped by part index), the full output shape and a copy of the header.
def split_images(images, output_folder, max_depth=50):
    temp_folder = output_folder
    make_dirs(temp_folder)
    for image in images:
        # Extract name from path.
        name = image.split('/')[-1].split('.')[0]
        # Load nii, header and data.
        nii = nib.load(image)
        header = nii.header
        data = nii.get_data()
        # Get depth from header and calculate number of parts.
        depth = nii.shape[1]
        nr_of_parts = math.ceil(depth / max_depth)
        # If first iteration, create an empty list of lists of parts.
        if image == images[0]:
            parts = [[] for _ in range(nr_of_parts)]
            output_shape = nii.shape
            output_header = copy(header)
        # Make parts.
        for i in range(nr_of_parts):
            # Define from and to index.
            from_i = i * max_depth
            to_i = min((i + 1) * max_depth - 1, depth - 1)
            # Calculate depth of current part. Set it in the nii header.
            cur_depth = to_i - from_i + 1
            header['dim'][2] = cur_depth
            # Slice data.
            cur_data = data[:, from_i:to_i + 1, :]
            cur_path = temp_folder + name + '_' + str(i) + '.nii'
            # Append to parts list.
            parts[i] += [cur_path]
            # Save part.
            nib.save(nib.Nifti1Image(cur_data, None, header), cur_path)
    return parts, output_shape, output_header
# Robustly average images that are too large to hold in memory at once: split them
# along axis 1, average each part, and stitch the part averages back together.
def robust_large_images(images, output, max_depth=50):
    temp_folder = get_parent_dir(images[0]) + 'avg_temp_volumes/'
    parts, output_shape, output_header = split_images(images, temp_folder, max_depth=max_depth)
    avgs = []
    for n, part in enumerate(parts):
        print('Averaging: ' + str(part))
        cur_avg = temp_folder + str(n) + '_avg.nii'
        robust_avg_volumes(part, cur_avg)
        avgs += [cur_avg]
    final_avg = np.zeros(output_shape)
    for m, avg in enumerate(avgs):
        print('Appending avg: ' + str(avg))
        final_avg[:, m * max_depth:min((m + 1) * max_depth, output_shape[1]), :] = nib.load(avg).get_data()
    nib.save(nib.Nifti1Image(final_avg, None, output_header), output)
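# Usage sketch (hypothetical paths; intermediate part averages are written to an
# 'avg_temp_volumes' folder next to the first input image):
#
#   large_volumes = files_in_dir('/data/registered_highres/', file_type='.nii')
#   robust_large_images(large_volumes, '/data/template/robust_avg_highres.nii', max_depth=50)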
# Weighted average of an image and a template, zero-padded to a common depth.
def avg_template(image, template, template_weight, output):
    print(image, template)
    a = nib.load(image)
    b = nib.load(template)
    pix_dim = a.header['pixdim']
    depth = max([a.shape[1], b.shape[1]])
    a_scaled = np.zeros((a.shape[0], depth, a.shape[2]))
    b_scaled = np.zeros((a.shape[0], depth, a.shape[2]))
    a_scaled[:a.shape[0], :a.shape[1], :a.shape[2]] = a.get_data() * (1 - template_weight)
    b_scaled[:b.shape[0], :b.shape[1], :b.shape[2]] = b.get_data() * template_weight
    weighted = (a_scaled + b_scaled) / 2
    nib.save(make_nii(weighted, pixdim=pix_dim), output)
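# Usage sketch (hypothetical paths; template_weight is the fraction contributed by the
# template, so 0.5 weights image and template equally before the sum is halved):
#
#   avg_template('/data/subject01/avg.nii', '/data/template/template.nii', 0.5,
#                '/data/template/updated_template.nii')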
def normal_list_1d(sigma, n, mu=0.5, max_is_one=False):
    """
    Normal distribution sampled at the integers 0 to n-1.
    :param sigma: standard deviation of the distribution.
    :param n: number of values.
    :param mu: [0.0:1.0] a float determining the relative position of the peak. 0.0 is left, 1.0 is right. Default: 0.5.
    :param max_is_one: if True, scale the values so that the maximum is 1.
    :return: [x] a list of the integers from 0 to n-1. [y] the corresponding normal values.
    """
    mu = mu * n - 0.5
    x = list(range(n))
    y = [1 / (math.sqrt(2 * math.pi * sigma ** 2)) * math.e ** ((-(xi - mu) ** 2) / (2 * sigma ** 2)) for xi in x]
    if max_is_one:
        max_y = max(y)
        y_adjusted = [yi * 1 / max_y for yi in y]
        return x, y_adjusted
    return x, y
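# Usage sketch: Gaussian weights over 11 slices, peaking at the central slice and scaled
# to a maximum of 1 (the numbers are illustrative, not taken from any pipeline):
#
#   x, y = normal_list_1d(sigma=2.0, n=11, mu=0.5, max_is_one=True)
#   # y[5] == 1.0, with smoothly decreasing weights towards both ends.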
# Mutual information between two images, estimated from their joint histogram with the given number of bins.
def mutual_information(x, y, bins):
    c_xy = np.histogram2d(x.ravel(), y.ravel(), bins)[0]
    mi = mutual_info_score(None, None, contingency=c_xy)
    return mi
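# Usage sketch (hypothetical paths; both inputs are flattened, so they only need to
# contain the same number of voxels):
#
#   a = nib.load('/data/subject01/slice_01.nii').get_data()
#   b = nib.load('/data/template/Template_S01.nii').get_data()
#   print(mutual_information(a, b, bins=32))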
# Get the immediate sub-directories of a directory, optionally as full paths.
def immediate_sub_dirs(directory, full_path=True):
    sub_dirs = sorted(next(os.walk(directory))[1])
    if full_path:
        return [directory + x + '/' for x in sub_dirs]
    else:
        return sub_dirs