adding_noising_example_for_voting.py
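
"""Add simulated artifacts (speckle noise, motion-like volume shifts, and gamma
adjustment) to a random selection of DWI NIfTI files and save the results next
to the originals."""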

import os
import glob
import nibabel as nib
import numpy as np
from skimage.util import random_noise
from tqdm import tqdm
import random
import skimage.exposure as exposure

init_path = r"C:\Users\aswen\Desktop\TestingData\Aswendt_qc_rsfmri_plot"

# Define the search paths for noisy files and all files
searchpath_delete = os.path.join(init_path, "**", "dwi", "brkraw", "*artifact*.nii.gz")
searchpath = os.path.join(init_path, "**", "dwi", "brkraw", "*.nii.gz")

# Delete all noisy files matching the pattern before proceeding
noisy_files = glob.glob(searchpath_delete, recursive=True)
for file in noisy_files:
    os.remove(file)

# Get the list of all files
all_files = glob.glob(searchpath, recursive=True)

# Randomly choose 2 files
randomly_chosen = random.sample(all_files, 2)
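# random.sample raises ValueError if searchpath matches fewer than 2 files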

# Process each file
for file in tqdm(randomly_chosen, desc="Processing", unit="file"):
    print(file)
    Im = nib.load(file)
    data = Im.get_fdata()  # Getting image data from the nibabel object

    # Intensity statistics of the original data (data_min and data_max are currently unused)
    data_min = np.min(data)
    data_max = np.max(data)
    max_abs = np.max(np.abs(data))

    # Normalize the data by its maximum absolute value (gives [0, 1] for non-negative data)
    normalized_data = data / max_abs
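    # skimage's random_noise assumes float images lie in [-1, 1] (or [0, 1] when
    # non-negative), which is why the data is scaled by its maximum absolute value.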

    # Add weak artifact: Gaussian noise only
    # Im_weak_artifact = random_noise(normalized_data, mode='gaussian', mean=0, var=0.001)  # Adjust the variance for noise

    motion_factor = 0.7

    # Add strong artifact: speckle noise + sudden shifts along the appropriate axis
    if normalized_data.ndim == 4:  # Check if the data is 4D
        # Generate motion-like shifts; the offsets are derived from the third and fourth dimensions
        translation_third_dim = int(normalized_data.shape[2] * motion_factor)
        translation_fourth_dim = int(normalized_data.shape[3] * motion_factor)
        motion_artifact = np.roll(normalized_data, translation_third_dim, axis=1)
        # motion_artifact = np.roll(motion_artifact, translation_fourth_dim, axis=3)
        motion_artifact = (2 * motion_artifact + 3 * normalized_data) / 5  # Weighted combination
        # Copy so the original normalized data is not modified in place
        normalized_data_with_added_motion = normalized_data.copy()
        # Corrupt every second volume in a 40-volume window starting at the middle of the fourth axis
        mid_vol = normalized_data.shape[3] // 2
        normalized_data_with_added_motion[:, :, :, mid_vol:mid_vol + 40:2] = motion_artifact[:, :, :, mid_vol:mid_vol + 40:2]
        Im_strong_artifact = random_noise(normalized_data_with_added_motion, mode='speckle', var=0.2)  # Adjust the variance for noise
        gamma = 0.6  # Gamma correction factor (for data in [0, 1], gamma < 1 brightens the image)
        Im_strong_artifact = exposure.adjust_gamma(Im_strong_artifact, gamma)
    else:
        Im_strong_artifact = random_noise(normalized_data, mode='speckle', var=0.2)  # Adjust the variance for noise
        # Adjust the overall intensity using gamma correction
        gamma = 0.8  # Gamma correction factor (for data in [0, 1], gamma < 1 brightens the image)
        Im_strong_artifact = exposure.adjust_gamma(Im_strong_artifact, gamma)
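    # Note: exposure.adjust_gamma expects non-negative input and raises a ValueError
    # otherwise; this holds here as long as the DWI magnitude data is non-negative.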

    # Combine weak and strong artifacts by averaging with weights
    weight_weak = 0.5  # Weight for weak artifact
    weight_strong = 0.2  # Weight for strong artifact
    # Im_combined_artifact = (weight_weak * Im_weak_artifact + weight_strong * Im_strong_artifact) / (weight_weak + weight_strong)

    # Creating new file names for weak and strong artifact images
    # (os.path.splitext would only strip the trailing ".gz", so replace the full ".nii.gz" suffix)
    weak_artifact_filename = file.replace(".nii.gz", "_weak_artifact.nii.gz")
    strong_artifact_filename = file.replace(".nii.gz", "_strong_artifact.nii.gz")

    # Save the artifact images, rescaled back to the original intensity range
    # nib.save(nib.Nifti1Image(Im_weak_artifact * max_abs, Im.affine), weak_artifact_filename)
    nib.save(nib.Nifti1Image(Im_strong_artifact * max_abs, Im.affine), strong_artifact_filename)
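    # If voxel metadata should be preserved exactly, the original header could also be
    # passed, e.g. nib.Nifti1Image(Im_strong_artifact * max_abs, Im.affine, Im.header).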