helper_functions.py

import os
import math
from copy import copy

import nibabel as nib
import numpy as np
from nipype.interfaces.ants.segmentation import N4BiasFieldCorrection
from sklearn.metrics import mutual_info_score


# Makes directory (or directories) if not already present.
def make_dirs(paths):
    if isinstance(paths, str):
        if not os.path.exists(paths):
            os.makedirs(paths)
            print(paths + ' directory made.')
    elif isinstance(paths, list):
        for path in paths:
            if not os.path.exists(path):
                os.makedirs(path)
                print(path + ' directory made.')
    else:
        print('The given path was neither a single path nor a list of paths.')

# Get list of paths of files in dir with specific file type.
def files_in_dir(path, file_type=''):
    path_list = []
    for file in os.listdir(path):
        if file.startswith('.'):
            continue
        if file.endswith(file_type):
            path_list.append(path + file)
    return sorted(path_list)


# Get list of paths of files in dir whose names do not contain exclusion_string.
def files_ind_dir_wo_string(path, exclusion_string):
    path_list = []
    for file in os.listdir(path):
        if exclusion_string not in file:
            path_list.append(path + file)
    return sorted(path_list)

# Get list of images with specific file type in folders. Also returns list of folders.
def images_in_folders(path, file_type=''):
    path_list = []
    folder_list = []
    for folder in os.listdir(path):
        folder_list.append(path + folder)
        for file in os.listdir(path + folder + '/'):
            if file.startswith('.'):
                continue
            if file.endswith(file_type):
                path_list.append(path + folder + '/' + file)
    return sorted(path_list), sorted(folder_list)  # sorted added by FFS 26/6/2019.

# Make .nii image from raw data. Sets specific pixel dimensions, if given.
def make_nii(data, pixdim=[]):
    nii = nib.Nifti1Image(data, np.eye(4))
    if len(pixdim) != 0:
        hdr = nii.header
        hdr['pixdim'] = pixdim
        nii = nib.Nifti1Pair(data, None, hdr)
    return nii

# Make volume from list of consecutive images.
def make_volume(images, output):
    init_image = nib.load(images[0])
    width, height = init_image.shape
    depth = len(images)
    init_pixdim = init_image.header['pixdim']
    volume_array = np.zeros((width, depth, height))
    for i, image in enumerate(images):
        image_data = np.rot90(nib.load(image).get_data(), 2)
        volume_array[:, depth - i - 1, :] = image_data
    nib.save(make_nii(volume_array, [1., init_pixdim[1], 0.100, init_pixdim[2], 1., 1., 1., 1.]), output)
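
# Hypothetical usage: stack the 2D slice images found in a folder into a single 3D
# volume ('slices/' and 'volume.nii' are placeholder names, not part of this module):
#
#   slice_paths = files_in_dir('slices/', file_type='.nii')
#   make_volume(slice_paths, 'volume.nii')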

# Get parent directory.
def get_parent_dir(path):
    folders = path.split('/')
    if folders[-1] == '':
        parent_dir = '/'.join(folders[0:-2]) + '/'
    else:
        parent_dir = '/'.join(folders[0:-1]) + '/'
    return parent_dir

# Reorient MR.
def reorient_mr(MR, result, inverted=False):
    original = nib.load(MR)
    original_data = original.get_data()
    pixdim = original.header['pixdim']
    if inverted:
        reoriented_data = np.fliplr(np.rot90(original_data, 1, axes=(1, 2)))
    else:
        reoriented_data = np.rot90(original_data, 1, axes=(1, 2))
    nib.save(make_nii(reoriented_data, [1., pixdim[1], pixdim[3], pixdim[2], 1., 1., 1., 1.]), result)


# Reorient atlas.
def reorient_atlas(atlas, result):
    original = nib.load(atlas)
    original_data = original.get_data()
    pixdim = original.header['pixdim']
    print(original_data.shape)
    reoriented_data = np.reshape(original_data, (456, 528, 320), order='A')
    print(reoriented_data.shape)
    nib.save(make_nii(reoriented_data, [1., pixdim[1], pixdim[3], pixdim[2], 1., 1., 1., 1.]), result)

# Slice a volume along its second axis and save each slice as a numbered template image.
def slice_volume(volume, slices_path):
    current_mr = nib.load(volume)
    current_data = current_mr.get_data()
    number_of_slices = current_data.shape[1]
    pixdim = current_mr.header['pixdim']
    for i in range(number_of_slices):
        image_nr = i + 1
        image_nr_beauty = '0' + str(image_nr) if image_nr < 10 else str(image_nr)
        nib.save(make_nii(np.rot90(current_data[:, number_of_slices - image_nr, :], 2),
                          [1., pixdim[1], pixdim[3], pixdim[2], 1., 1., 1., 1.]),
                 slices_path + 'Template_S' + image_nr_beauty + '.nii')

# Run N4 bias field correction on a single image via Nipype's ANTs interface.
def bias_field_correction(image, output, dim=2):
    N4BiasFieldCorrection(
        input_image=image,
        output_image=output,
        dimension=dim
    ).run()
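
# bias_field_correction wraps Nipype's ANTs interface, so the ANTs N4BiasFieldCorrection
# executable is assumed to be installed and on the PATH. Hypothetical usage
# ('mr.nii' and 'mr_n4.nii' are placeholder filenames):
#
#   bias_field_correction('mr.nii', 'mr_n4.nii', dim=3)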

# Flip a volume along its first axis.
def flip_volume_lr(image, output):
    nii = nib.load(image)
    hdr = nii.header
    data = nii.get_data()
    flipped = np.flip(data, axis=0)
    nib.save(nib.Nifti1Pair(flipped, None, hdr), output)


# Append a trailing slash to a path if it is missing.
def add_slash_to_path(path):
    return path + '/' if not path[-1] == '/' else path

# Write an ITK-style identity transform file (2D or 3D) and return its path.
def identity_transform(work_dir, dimensions):
    path = work_dir + 'identity_transform.txt'
    with open(path, 'w+') as transform_file:
        if dimensions == 3:
            transform_file.write('Transform: MatrixOffsetTransformBase_double_3_3'
                                 '\nParameters: 1 0 0 0 1 0 0 0 1 0 0 0'
                                 '\nFixedParameters: 0 0 0\n')
        elif dimensions == 2:
            transform_file.write('Transform: MatrixOffsetTransformBase_double_2_2'
                                 '\nParameters: 1 0 0 1 0 0'
                                 '\nFixedParameters: 0 0\n')
        else:
            print('Helper function identity_transform only supports 2D and 3D.')
    return path

# Convert int to string with two places (1, 7, 13) --> (01, 07, 13).
def pretty(i):
    return '0' + str(i) if i < 10 else str(i)


# Returns True if the given path exists.
def path_exists(path):
    return os.path.exists(path)

# Voxel-wise robust average of a list of volumes: values further than two standard
# deviations from the voxel mean are discarded before taking the median.
def robust_avg_volumes(volumes, output):
    # Determine images matrix dimensions.
    depth_list = []
    init_nii = nib.load(volumes[0])
    pix_dim = init_nii.header['pixdim']
    a, b, c = init_nii.shape
    d = len(volumes)
    for volume_shape in volumes:
        depth_list.append(nib.load(volume_shape).shape[1])
    b = max(depth_list)
    matrix_shape = (a, b, c, d)
    images_matrix = np.zeros(matrix_shape)
    print('Average:')
    for i, volume in enumerate(volumes):
        print('Adding', volume)
        nii = nib.load(volume)
        images_matrix[:, 0:nii.shape[1], :, i] = nii.get_data()
    # Calculate mean and std.
    images_mean = np.mean(images_matrix, axis=3)
    images_std = images_matrix.std(3)
    images_std_scaled = images_std * 2
    # Duplicate mean and std to full dimensions for easy matching.
    mean_matrix = np.repeat(images_mean[:, :, :, np.newaxis], d, axis=3)
    scaled_std_matrix = np.repeat(images_std_scaled[:, :, :, np.newaxis], d, axis=3)
    # Mask outliers with NaN, then take the voxel-wise median of the remaining values.
    robust_matrix = np.where(
        (images_matrix < mean_matrix - scaled_std_matrix) | (images_matrix > mean_matrix + scaled_std_matrix),
        np.nan,
        images_matrix)
    robust_mean = np.nanmedian(robust_matrix, axis=3)
    nib.save(make_nii(robust_mean, pixdim=pix_dim), output)
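
# Hypothetical usage: combine a set of co-registered volumes into one robust average
# ('scans/' and 'robust_avg.nii' are placeholder names):
#
#   volumes = files_in_dir('scans/', file_type='.nii')
#   robust_avg_volumes(volumes, 'robust_avg.nii')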

# Split images along their second axis into parts of at most max_depth slices.
# Returns the list of part paths per part index, plus the original shape and header.
def split_images(images, output_folder, max_depth=50):
    temp_folder = output_folder
    make_dirs(temp_folder)
    for image in images:
        # Extract name from path.
        name = image.split('/')[-1].split('.')[0]
        # Load nii, header and data.
        nii = nib.load(image)
        header = nii.header
        data = nii.get_data()
        # Get depth from header and calculate number of parts.
        depth = nii.shape[1]
        nr_of_parts = math.ceil(depth / max_depth)
        # If first iteration, create empty list of lists of parts.
        if image == images[0]:
            parts = [[] for _ in range(nr_of_parts)]
            output_shape = nii.shape
            output_header = copy(header)
        # Make parts.
        for i in range(nr_of_parts):
            # Define from and to index (inclusive).
            from_i = i * max_depth
            to_i = min((i + 1) * max_depth - 1, depth - 1)
            # Calculate depth of current part. Set in nii header.
            cur_depth = to_i - from_i + 1
            header['dim'][2] = cur_depth
            # Slice data.
            cur_data = data[:, from_i:to_i + 1, :]
            cur_path = temp_folder + name + '_' + str(i) + '.nii'
            # Append to parts list.
            parts[i] += [cur_path]
            # Save part.
            nib.save(nib.Nifti1Image(cur_data, None, header), cur_path)
    return parts, output_shape, output_header

# Robust average for volumes too large to hold in memory at once: split into parts,
# average each part, then stitch the averaged parts back together.
def robust_large_images(images, output, max_depth=50):
    temp_folder = get_parent_dir(images[0]) + 'avg_temp_volumes/'
    parts, output_shape, output_header = split_images(images, temp_folder, max_depth=max_depth)
    avgs = []
    for n, part in enumerate(parts):
        print('Averaging: ' + str(part))
        cur_avg = temp_folder + str(n) + '_avg.nii'
        robust_avg_volumes(part, cur_avg)
        avgs += [cur_avg]
    final_avg = np.zeros(output_shape)
    for m, avg in enumerate(avgs):
        print('Appending avg: ' + str(avg))
        final_avg[:, m * max_depth:min((m + 1) * max_depth, output_shape[1]), :] = nib.load(avg).get_data()
    nib.save(nib.Nifti1Image(final_avg, None, output_header), output)

# Weighted average of an image and a template, padded to a common depth.
def avg_template(image, template, template_weight, output):
    print(image, template)
    a = nib.load(image)
    b = nib.load(template)
    pix_dim = a.header['pixdim']
    depth = max([a.shape[1], b.shape[1]])
    a_scaled = np.zeros((a.shape[0], depth, a.shape[2]))
    b_scaled = np.zeros((a.shape[0], depth, a.shape[2]))
    a_scaled[:a.shape[0], :a.shape[1], :a.shape[2]] = a.get_data() * (1 - template_weight)
    b_scaled[:b.shape[0], :b.shape[1], :b.shape[2]] = b.get_data() * template_weight
    weighted = (a_scaled + b_scaled) / 2
    nib.save(make_nii(weighted, pixdim=pix_dim), output)

def normal_list_1d(sigma, n, mu=0.5, max_is_one=False):
    """
    Normal distribution sampled at the natural numbers 0..n-1.
    :param sigma: standard deviation of the distribution.
    :param n: number of values.
    :param mu: [0.0:1.0] a float determining the relative position of the peak. 0.0 left, 1.0 right. Default: 0.5.
    :param max_is_one: if True, rescale the values so the maximum is 1.
    :return: [x] a list of the natural numbers from 0 to n-1. [y] the normal density at each x.
    """
    mu = mu * n - 0.5
    x = [xi for xi in range(n)]
    y = [1 / (math.sqrt(2 * math.pi * sigma ** 2)) * math.e ** ((-(xi - mu) ** 2) / (2 * sigma ** 2)) for xi in x]
    if max_is_one:
        max_y = max(y)
        y_adjusted = [yi * 1 / max_y for yi in y]
        return x, y_adjusted
    return x, y
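
# Hypothetical usage: a bell-shaped weighting curve over 100 slices, peaking in the
# middle and rescaled so the maximum weight is 1:
#
#   x, weights = normal_list_1d(sigma=10, n=100, mu=0.5, max_is_one=True)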

# Mutual information between two images, estimated from a joint histogram with the given number of bins.
def mutual_information(x, y, bins):
    c_xy = np.histogram2d(x.ravel(), y.ravel(), bins)[0]
    mi = mutual_info_score(None, None, contingency=c_xy)
    return mi

# List the immediate subdirectories of a directory, optionally as full paths.
def immediate_sub_dirs(directory, full_path=True):
    sub_dirs = sorted(next(os.walk(directory))[1])
    if full_path:
        return [directory + x + '/' for x in sub_dirs]
    else:
        return sub_dirs
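
# Minimal end-to-end sketch, assuming a folder of raw .nii slice images; every path
# below is a placeholder and the processing order is only illustrative.
if __name__ == '__main__':
    work_dir = add_slash_to_path('example_data')  # hypothetical folder
    make_dirs([work_dir + 'volumes/', work_dir + 'slices/'])
    slice_files = files_in_dir(work_dir, file_type='.nii')
    if slice_files:
        make_volume(slice_files, work_dir + 'volumes/volume.nii')
        slice_volume(work_dir + 'volumes/volume.nii', work_dir + 'slices/')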