Browse Source

changes for Python 3.7+ compatibility; minor bug fixes

Ajayrama Kumaraswamy 3 years ago
parent
commit
742f305d13
46 changed files with 256 additions and 264 deletions
  1. 1 1
      Readme.md
  2. 7 7
      regmaxsn/core/SWCTransforms.py
  3. 1 1
      regmaxsn/core/farthestPointStats.py
  4. 15 21
      regmaxsn/core/iterativeRegistration.py
  5. 3 2
      regmaxsn/core/maxDistanceBasedMetric.py
  6. 3 2
      regmaxsn/core/occupancyBasedMeasure.py
  7. 10 9
      regmaxsn/core/plotDensities.py
  8. 10 10
      regmaxsn/core/rotOnce.py
  9. 9 7
      regmaxsn/core/scaleOnce.py
  10. 2 2
      regmaxsn/core/swcFuncs.py
  11. 8 10
      regmaxsn/core/transOnce.py
  12. 2 2
      regmaxsn/core/transforms.py
  13. 10 10
      regmaxsn/scripts/algorithms/RegMaxS.py
  14. 13 13
      regmaxsn/scripts/algorithms/RegMaxSN.py
  15. 1 1
      regmaxsn/scripts/algorithms/alignAllRot.py
  16. 1 1
      regmaxsn/scripts/algorithms/alignAllScale.py
  17. 1 1
      regmaxsn/scripts/algorithms/alignAllTrans.py
  18. 4 4
      regmaxsn/scripts/algorithms/pcaBasedReg.py
  19. 4 4
      regmaxsn/scripts/analysis/calcD2Metrics.py
  20. 5 6
      regmaxsn/scripts/analysis/compareRegPerfAA1.py
  21. 5 6
      regmaxsn/scripts/analysis/compareRegPerfLLC.py
  22. 5 6
      regmaxsn/scripts/analysis/compareRegPerfOMB.py
  23. 5 6
      regmaxsn/scripts/analysis/compareRegPerfOPSInt.py
  24. 5 6
      regmaxsn/scripts/analysis/compareRegPerfRAL.py
  25. 5 6
      regmaxsn/scripts/analysis/compareRegPerfRAL_min5.py
  26. 14 14
      regmaxsn/scripts/analysis/pairwiseDistanceStats.py
  27. 13 13
      regmaxsn/scripts/analysis/pairwiseDistanceStatsNN.py
  28. 20 20
      regmaxsn/scripts/analysis/pairwiseDistanceStatsVsAnisotropicScaling.py
  29. 5 5
      regmaxsn/scripts/analysis/plotCHBasedMetricVsAverageRotScaleTransform.py
  30. 2 2
      regmaxsn/scripts/analysis/plotCHDBasedMeasureVsIterations.py
  31. 5 5
      regmaxsn/scripts/analysis/plotOccupancyBasedMeasureVsAverageRotScaleTransform.py
  32. 4 4
      regmaxsn/scripts/analysis/plotOccupancyMeasureVsIterations.py
  33. 5 5
      regmaxsn/scripts/analysis/plotPairwiseDistance.py
  34. 4 4
      regmaxsn/scripts/analysis/plotPairwiseDistanceNN.py
  35. 7 6
      regmaxsn/scripts/analysis/plotReg-MaxS-NIOUTraj.py
  36. 5 5
      regmaxsn/scripts/analysis/plotRegMaxSPerfVsNoise.py
  37. 5 5
      regmaxsn/scripts/analysis/regErrorVsAnisotropicscaling.py
  38. 3 7
      regmaxsn/scripts/analysis/saveAverageDensity.py
  39. 1 1
      regmaxsn/scripts/analysis/viz2DOccupancy.py
  40. 1 1
      regmaxsn/scripts/utils/addRandomNoise.py
  41. 2 1
      regmaxsn/scripts/utils/constructRegMaxSNParFile.py
  42. 14 14
      regmaxsn/scripts/utils/constructRegMaxSParFile.py
  43. 1 1
      regmaxsn/scripts/utils/correctReg-MaxS-N_finalChoice.py
  44. 1 1
      setup.py
  45. 7 6
      setupWorkspace.py
  46. 2 0
      tests/maxDistancesBasedMetric_test.py

+ 1 - 1
Readme.md

@@ -20,7 +20,7 @@ The algorithms are written in  Python and at the moment work only with SWC files
 With Conda (Linux or Windows):
 
 1. Create a new environment: 
->conda create --name regmaxsn python=2.7 numpy scipy pillow matplotlib scikit-learn pandas seaborn openpyxl xlrd statsmodels
+>conda create --name regmaxsn python=3.7 numpy scipy pillow matplotlib scikit-learn pandas seaborn openpyxl xlrd statsmodels
 2. Activate environment: 
 >(on Linux) source activate regmaxsn 
 >(on Windows) activate regmaxsn

+ 7 - 7
regmaxsn/core/SWCTransforms.py

@@ -1,7 +1,7 @@
 import os
 import numpy as np
-from transforms import compose_matrix
-from swcFuncs import readSWC_numpy, writeSWC_numpy
+from regmaxsn.core.transforms import compose_matrix
+from regmaxsn.core.swcFuncs import readSWC_numpy, writeSWC_numpy
 
 
 def three32BitInt2complexList(arr):
@@ -39,7 +39,7 @@ class BaseSWCTranform(object):
         elif type(refSWC) == np.ndarray:
             self.refSWCPts = refSWC[:, 2:5]
         else:
-            raise(ValueError('Unknown data in SWC2Align'))
+            raise ValueError('Unknown data in SWC2Align')
 
         self.refCenter = self.refSWCPts.mean(axis=0)
         refVox = np.array(np.round(self.refSWCPts / gridSize), np.int32)
@@ -53,7 +53,7 @@ class BaseSWCTranform(object):
             self.SWC2AlignFull = SWC2Align
             self.headr = ''
         else:
-            raise(ValueError('Unknown data in SWC2Align'))
+            raise ValueError('Unknown data in SWC2Align')
         self.SWC2AlignPts = self.SWC2AlignFull[:, 2:5]
         self.center = self.SWC2AlignPts.mean(axis=0)
 
@@ -111,7 +111,7 @@ class SWCScale(object):
         elif type(refSWC) == np.ndarray:
             self.refSWCPts = refSWC[:, 2:5]
         else:
-            raise(ValueError('Unknown data in SWC2Align'))
+            raise ValueError('Unknown data in SWC2Align')
 
         refCenter = self.refSWCPts.mean(axis=0)
         refSWCPtsCentered = self.refSWCPts - refCenter
@@ -125,7 +125,7 @@ class SWCScale(object):
             self.SWC2AlignFull = SWC2Align
             self.headr = ''
         else:
-            raise(ValueError('Unknown data in SWC2Align'))
+            raise ValueError('Unknown data in SWC2Align')
 
         self.SWC2AlignPts = self.SWC2AlignFull[:, 2:5].copy()
         self.center = self.SWC2AlignPts.mean(axis=0)
@@ -165,7 +165,7 @@ class ArgGenIterator:
         self.pointsDone = 0
         return self
 
-    def next(self):
+    def __next__(self):
 
         if self.pointsDone < len(self.arg1):
             toReturn = (self.arg1[self.pointsDone], self.arg2)

+ 1 - 1
regmaxsn/core/farthestPointStats.py

@@ -25,7 +25,7 @@ def maxDistStats(swcFiles):
 
     for swcInd1, swcData1 in enumerate(swcDatas):
 
-        swcInds = range(len(swcFiles))
+        swcInds = list(range(len(swcFiles)))
         swcInds.remove(swcInd1)
         swcDataSeries1 = swcDataSeries[swcInd1]
 

+ 15 - 21
regmaxsn/core/iterativeRegistration.py

@@ -5,6 +5,9 @@ from .SWCTransforms import SWCTranslate, objFun
 import shutil
 import json
 import subprocess
+from functools import reduce
+import pathlib as pl
+
 
 def transPreference(x, y):
     """
@@ -71,8 +74,6 @@ class IterativeRegistration(object):
         self.nCPU = nCPU
         self.allFuncs = {'trans': self.transOnce, 'rot': self.rotOnce, 'scale': self.scaleOnce}
 
-
-
     def rotOnce(self, SWC2Align, outFiles, ipParFile):
         """
         Runs exhaustive search to find the best rotation euler angles about XYZ axes that maximize the volume overlap
@@ -102,7 +103,7 @@ class IterativeRegistration(object):
             bestSol = out['bestSol']
             done = out['done']
             bestVal = out['bestVal']
-            print(bestSol, bestVal, done)
+            print((bestSol, bestVal, done))
 
         return bestSol, bestVal, done
 
@@ -135,7 +136,7 @@ class IterativeRegistration(object):
             bestSol = out['bestSol']
             done = out['done']
             bestVal = out['bestVal']
-            print(bestSol, bestVal, done)
+            print((bestSol, bestVal, done))
 
         return bestSol, bestVal, done
 
@@ -168,7 +169,7 @@ class IterativeRegistration(object):
             bestSol = out['bestSol']
             done = out['done']
             bestVal = out['bestVal']
-            print(bestSol, bestVal, done)
+            print((bestSol, bestVal, done))
 
         return bestSol, bestVal, done
 
@@ -204,8 +205,7 @@ class IterativeRegistration(object):
             elif g == 'trans':
                 bestSol, bestVal, done = self.transOnce(SWC2Align, tempOutFiles[g], ipParFile)
             else:
-                raise('Invalid transformation type ' + g)
-
+                raise ValueError(f'Invalid transformation type {g}')
 
             tempDones[g] = done
 
@@ -216,13 +216,8 @@ class IterativeRegistration(object):
                 presBestSol = bestSol
                 presBestDone = done
 
-
         return tempDones, presBestSol, presBestVal, presBestDone, presBestTrans
 
-
-
-
-
     def performReg(self, SWC2Align, resFile, scaleBounds,
                    inPartsDir=None, outPartsDir=None,
                    initGuessType='just_centroids',
@@ -246,6 +241,7 @@ class IterativeRegistration(object):
         """
 
         resDir, expName = os.path.split(resFile[:-4])
+        pl.Path(resDir).mkdir(parents=True, exist_ok=True)
 
         ipParFile = os.path.join(resDir, 'tmp.json')
         vals = ['trans', 'rot', 'scale']
@@ -280,8 +276,7 @@ class IterativeRegistration(object):
             totalTranslation = SWC2AlignMean
 
         else:
-            raise(ValueError('Unknown value for argument \'initGuessType\''))
-
+            raise ValueError('Unknown value for argument \'initGuessType\'')
 
         SWC2AlignT = SWC2AlignLocal
 
@@ -293,7 +288,6 @@ class IterativeRegistration(object):
             done = False
             srts = ['rot', 'trans']
 
-
             while not done:
 
                 tempDones, bestSol, bestVal, lDone, g = self.compare(srts, SWC2AlignT, tempOutFiles, ipParFile, None)
@@ -313,7 +307,7 @@ class IterativeRegistration(object):
                     else:
                         totalTransform = np.dot(presTrans, totalTransform)
 
-                print(str(iterationNo) + g)
+                print((str(iterationNo) + g))
 
                 bestVals[bestVal] = {"outFile": outFile, "outFileSol": outFileSol,
                                      "totalTransform": totalTransform,
@@ -342,7 +336,7 @@ class IterativeRegistration(object):
                 presTrans = np.array(pars['transMat'])
                 totalTransform = np.dot(presTrans, totalTransform)
 
-            print(str(iterationNo) + 's')
+            print((str(iterationNo) + 's'))
 
             bestVals[bestVal] = {"outFile": outFile, "outFileSol": outFileSol,
                                  "totalTransform": totalTransform,
@@ -387,7 +381,7 @@ class IterativeRegistration(object):
                     else:
                         totalTransform = np.dot(presTrans, totalTransform)
 
-                print(str(iterationNo) + g)
+                print((str(iterationNo) + g))
 
                 bestVals[bestVal] = {"outFile": outFile, "outFileSol": outFileSol,
                                      "totalTransform": totalTransform,
@@ -404,7 +398,7 @@ class IterativeRegistration(object):
         totalTranslation = bestVals[championBestVal]["totalTranslation"]
         bestIterIndicator = bestVals[championBestVal]["iterationIndicator"]
 
-        print("bestIter: {}, bestVal: {}".format(bestIterIndicator, championBestVal))
+        print(("bestIter: {}, bestVal: {}".format(bestIterIndicator, championBestVal)))
 
         totalTransform[:3, 3] += totalTranslation
 
@@ -447,7 +441,7 @@ class IterativeRegistration(object):
                                            )
 
             else:
-                print('Specified partsDir {} not found'.format(inPartsDir))
+                print(('Specified partsDir {} not found'.format(inPartsDir)))
 
 
         return finalFile, finalSolFile
@@ -525,7 +519,7 @@ def writeFakeSWC(data, fName, extraCol=None):
         toWrite = np.empty((data.shape[0], 7))
 
     toWrite[:, 2:5] = data
-    toWrite[:, 0] = range(1, data.shape[0] + 1)
+    toWrite[:, 0] = list(range(1, data.shape[0] + 1))
     toWrite[:, 1] = 3
     toWrite[:, 5] = 1
     toWrite[:, 6] = -np.arange(1, data.shape[0] + 1)

+ 3 - 2
regmaxsn/core/maxDistanceBasedMetric.py

@@ -43,8 +43,9 @@ def calcMaxDistances(swcList):
 
     unionWithDuplicates = np.concatenate(swcPointSets, axis=0)
     if any(np.abs(unionWithDuplicates).max(axis=0) == 0):
-        raise(ValueError("The list of SWCs all lie on a plane or on  a line and hence do not "
-                         "for a 3D point cloud. Such SWCs are not supported."))
+        raise ValueError(
+            "The list of SWCs all lie on a plane or on a line and hence do not "
+            "form a 3D point cloud. Such SWCs are not supported.")
 
     hull = ConvexHull(unionWithDuplicates)
 

+ 3 - 2
regmaxsn/core/occupancyBasedMeasure.py

@@ -2,6 +2,7 @@ import numpy as np
 from collections import Counter
 from pyemd import emd
 
+
 def calcOccupancyDistribution(swcList, voxelSize):
     """
     Returns the distribution of the sum of voxel occupancies across swcs in swcList.
@@ -19,7 +20,7 @@ def calcOccupancyDistribution(swcList, voxelSize):
         voxels.extend(list(aVoxSet))
 
     voxelCounter = Counter(voxels)
-    counts = voxelCounter.values()
+    counts = list(voxelCounter.values())
 
     bins = np.arange(1, len(swcList) + 2) - 0.5
 
@@ -29,7 +30,7 @@ def calcOccupancyDistribution(swcList, voxelSize):
 
     histNormed = histWeighted / float(sum(histWeighted))
 
-    return dict(zip(np.arange(1, len(swcList) + 1), histNormed))
+    return {k + 1: v for k, v in enumerate(histNormed)}
 
 
 def occupancyEMD(swcList, voxelSize):

+ 10 - 9
regmaxsn/core/plotDensities.py

@@ -4,6 +4,7 @@ import os
 from scipy.ndimage import gaussian_filter
 import tifffile
 
+
 class DensityVizualizations(object):
 
     def __init__(self, swcSet, gridUnitSizes, resampleLen,
@@ -25,13 +26,13 @@ class DensityVizualizations(object):
         datas = {}
 
         for swcInd, swcFile in enumerate(swcSet):
-            print('Resamping ' + swcFile)
+            print(('Resampling ' + swcFile))
             totalLen, data = resampleSWC(swcFile, resampleLen, mask=masks[swcInd])
             dataT = np.dot(initTrans, data[:, :3].T).T
             datas[swcFile] = dataT
         self.transMat[:3, :3] = initTrans
 
-        allData = np.concatenate(tuple(datas.itervalues()), axis=0)
+        allData = np.concatenate(tuple(datas.values()), axis=0)
         self.allDataMean = allData.mean(axis=0)
 
         if pcaView == 'closestPCMatch':
@@ -42,7 +43,7 @@ class DensityVizualizations(object):
                 refEvecs, thrash = getPCADetails(refSWC, center=True)
                 fEvecs = np.empty_like(refEvecs)
                 coreff = np.dot(refEvecs.T, evecs)
-                possInds = range(refEvecs.shape[1])
+                possInds = list(range(refEvecs.shape[1]))
                 for rowInd in range(refEvecs.shape[1]):
                     bestCorrInd = np.argmax(np.abs(coreff[rowInd, possInds]))
                     fEvecs[:, rowInd] = np.sign(coreff[rowInd, possInds[bestCorrInd]]) * evecs[:, possInds[bestCorrInd]]
@@ -61,7 +62,7 @@ class DensityVizualizations(object):
                 mean2Use = np.loadtxt(refSWC)[:, 2:5].mean(axis=0)
 
             else:
-                raise(ValueError('RefSWC must be specified when pcaView == \'assumeRegistered\''))
+                raise ValueError('RefSWC must be specified when pcaView == \'assumeRegistered\'')
 
         else:
             fEvecs = np.eye(3)
@@ -69,8 +70,8 @@ class DensityVizualizations(object):
 
 
         self.digDatas = {}
-        for swcFile, data in datas.iteritems():
-            print('Digitizing ' + swcFile)
+        for swcFile, data in datas.items():
+            print(('Digitizing ' + swcFile))
             data -= mean2Use
             data = np.dot(fEvecs.T, data.T).T
             digData = digitizeSWCXYZ(data + mean2Use, gridUnitSizes)
@@ -106,13 +107,13 @@ class DensityVizualizations(object):
 
         for swcFile in swcFiles:
 
-            print('Calculating Density for ' + swcFile)
+            print(('Calculating Density for ' + swcFile))
             densityMat = np.zeros_like(densityMatSum)
-            print('Doing ' + os.path.split(swcFile)[1])
+            print(('Doing ' + os.path.split(swcFile)[1]))
             if swcFile in self.digDatas:
                 digDataTranslated = self.digDatas[swcFile][:, :3] - self.minXYZ
             else:
-                raise(ValueError(swcFile + ' not initialized in constructing DensityVizualizations object'))
+                raise ValueError(swcFile + ' not initialized in constructing DensityVizualizations object')
             densityMat[digDataTranslated[:, 0], digDataTranslated[:, 1], digDataTranslated[:, 2]] = 1
             densityMatSum += densityMat
             del densityMat

+ 10 - 10
regmaxsn/core/rotOnce.py

@@ -1,4 +1,4 @@
-from SWCTransforms import SWCRotate, ArgGenIterator, objFun
+from regmaxsn.core.SWCTransforms import SWCRotate, ArgGenIterator, objFun
 import multiprocessing as mp
 import numpy as np
 import json
@@ -31,16 +31,16 @@ bestSol = [0, 0, 0]
 for gridInd, gridSize in enumerate(gridSizes):
 
     if debugging:
-        print('Gridsize:' + str(gridSize))
+        print(('Gridsize:' + str(gridSize)))
     stepSize = stepSizes[gridInd]
     if debugging:
-        print('Stepsize: ' + str(np.rad2deg(stepSize)))
+        print(('Stepsize: ' + str(np.rad2deg(stepSize))))
     bounds = (np.array(bounds).T - np.array(bestSol)).T
     boundsRoundedUp = np.sign(bounds) * np.ceil(np.abs(bounds) / stepSize) * stepSize
     possiblePts1D = [np.round(bestSol[ind] + np.arange(x[0], x[1] + stepSize, stepSize), 3).tolist()
                      for ind, x in enumerate(boundsRoundedUp)]
     if debugging:
-        print(np.rad2deg([bestSol[ind] + x for ind, x in enumerate(boundsRoundedUp)]))
+        print((np.rad2deg([bestSol[ind] + x for ind, x in enumerate(boundsRoundedUp)])))
     possiblePts3D = np.round(list(product(*possiblePts1D)), 6).tolist()
     argGen = ArgGenIterator(possiblePts3D, SWCDatas[gridInd])
     funcVals = pool.map_async(objFun, argGen).get(1800)
@@ -54,21 +54,21 @@ for gridInd, gridSize in enumerate(gridSizes):
 
         prevVals = [objFun((x, SWCDatas[gridInd - 1])) for x in minimzers]
         bestSol = minimzers[np.argmin(prevVals)]
-    bounds = map(lambda x: [x - np.sqrt(2) * stepSize, x + np.sqrt(2) * stepSize], bestSol)
+    bounds = [[x - np.sqrt(2) * stepSize, x + np.sqrt(2) * stepSize] for x in bestSol]
 
     if debugging:
         bestVal = objFun((bestSol, SWCDatas[gridInd]))
-        print(np.rad2deg(bestSol), bestVal)
+        print((np.rad2deg(bestSol), bestVal))
 
 
 if minRes < stepSizes[-1]:
 
     if debugging:
-        print('Stepsize: ' + str(np.rad2deg(minRes)))
+        print(('Stepsize: ' + str(np.rad2deg(minRes))))
     bounds = (np.array(bounds).T - np.array(bestSol)).T
     boundsRoundedUp = np.sign(bounds) * np.ceil(np.abs(bounds) / minRes) * minRes
     if debugging:
-        print(np.rad2deg([bestSol[ind] + x for ind, x in enumerate(boundsRoundedUp)]))
+        print((np.rad2deg([bestSol[ind] + x for ind, x in enumerate(boundsRoundedUp)])))
     possiblePts1D = [np.round(bestSol[ind] + np.arange(x[0], x[1] + minRes, minRes), 3).tolist()
                      for ind, x in enumerate(boundsRoundedUp)]
     possiblePts3D = np.round(list(product(*possiblePts1D)), 6).tolist()
@@ -81,12 +81,12 @@ if minRes < stepSizes[-1]:
     bestSol = minimzers[np.argmin(prevVals)]
     if debugging:
         bestVal = objFun((bestSol, SWCDatas[-1]))
-        print(np.rad2deg(bestSol), bestVal)
+        print((np.rad2deg(bestSol), bestVal))
 
 bestVal = objFun((bestSol, SWCDatas[-1]))
 nochange = objFun(([0, 0, 0], SWCDatas[-1]))
 if debugging:
-    print(np.rad2deg(bestSol), bestVal, nochange)
+    print((np.rad2deg(bestSol), bestVal, nochange))
 
 
 done = False

+ 9 - 7
regmaxsn/core/scaleOnce.py

@@ -1,4 +1,4 @@
-from SWCTransforms import SWCScale, SWCTranslate, ArgGenIterator, objFun
+from regmaxsn.core.SWCTransforms import SWCScale, SWCTranslate, ArgGenIterator, objFun
 import multiprocessing as mp
 import numpy as np
 import json
@@ -30,7 +30,7 @@ bestSol = [1.0, 1.0, 1.0]
 
 stepSizes = [max(minStepSize, min(2.0, (maxDist / (maxDist - g)))) for g in gridSizes]
 if debugging:
-    print(maxDist, [(maxDist / (maxDist - g)) for g in gridSizes])
+    print((maxDist, [(maxDist / (maxDist - g)) for g in gridSizes]))
 
 overestimationError = lambda d, g: (d + g) / d
 underestimationError = lambda d, g: ((d + 1.5 * g) * d) / ((d - 0.5 * g) * (d + g))
@@ -47,10 +47,11 @@ for gridInd, gridSize in enumerate(gridSizes):
                         for x, y in enumerate(boundsExponentsRoundedDown)]
     if debugging:
         print(stepSize)
-        print('Gridsize:' + str(gridSize))
+        print(('Gridsize:' + str(gridSize)))
         print(bounds)
-        print(map(len, possiblePts1D))
+        print([len(x) for x in possiblePts1D])
         print([bestSol[x] * (stepSize ** y) for x, y in enumerate(boundsExponentsRoundedDown)])
+
     possiblePts3D = np.round(list(product(*possiblePts1D)), 6).tolist()
     argGen = ArgGenIterator(possiblePts3D, SWCDatas[gridInd])
     funcVals = pool.map_async(objFun, argGen).get(1800)
@@ -85,8 +86,9 @@ if stepSizes[-1] > minStepSize:
     if debugging:
         print(stepSize)
         print(bounds)
-        print(map(len, possiblePts1D))
+        print([len(x) for x in possiblePts1D])
         print([bestSol[x] * (stepSize ** y) for x, y in enumerate(boundsExponentsRoundedDown)])
+
     possiblePts3D = np.round(list(product(*possiblePts1D)), 6).tolist()
     argGen = ArgGenIterator(possiblePts3D, SWCDatas[-1])
     funcVals = pool.map_async(objFun, argGen).get(1800)
@@ -95,14 +97,14 @@ if stepSizes[-1] > minStepSize:
     prevVals = [objFun((x, SWCDatas[-2])) for x in minimzers]
     bestSol = minimzers[np.argmin(prevVals)]
     if debugging:
-        print(bestSol, min(funcVals))
+        print((bestSol, min(funcVals)))
 
 bestVal = objFun((bestSol, SWCDatas[-1]))
 nochange = objFun(([1, 1, 1], SWCDatas[-1]))
 
 if debugging:
     bestVals = [objFun((bestSol, x)) for x in SWCDatas]
-    print(bestSol, nochange, bestVal)
+    print((bestSol, nochange, bestVal))
 
 done = False
 

+ 2 - 2
regmaxsn/core/swcFuncs.py

@@ -73,7 +73,7 @@ def transSWC(fName, A, b, destFle):
     elif data.shape[1] == 8:
         formatStr = '%d %d %0.3f %0.3f %0.3f %0.3f %d %d'
     else:
-        raise(TypeError('Data in the input file is of unknown format.'))
+        raise TypeError('Data in the input file is of unknown format.')
 
     np.savetxt(destFle, data, header=headr, fmt=formatStr)
 
@@ -111,7 +111,7 @@ def transSWC_rotAboutPoint(fName, A, b, destFle, point):
     elif data.shape[1] == 8:
         formatStr = '%d %d %0.3f %0.3f %0.3f %0.3f %d %d'
     else:
-        raise(TypeError('Data in the input file is of unknown format.'))
+        raise TypeError('Data in the input file is of unknown format.')
 
     np.savetxt(destFle, data, header=headr, fmt=formatStr)
 #***********************************************************************************************************************

+ 8 - 10
regmaxsn/core/transOnce.py

@@ -1,4 +1,4 @@
-from SWCTransforms import SWCTranslate, ArgGenIterator, objFun
+from regmaxsn.core.SWCTransforms import SWCTranslate, ArgGenIterator, objFun
 import multiprocessing as mp
 import numpy as np
 import json
@@ -31,9 +31,9 @@ for gridInd, gridSize in enumerate(gridSizes):
 
     possiblePts3D = list(product(*possiblePts1D))
     if debugging:
-        print('Gridsize:' + str(gridSize))
+        print(('Gridsize:' + str(gridSize)))
         print(bounds)
-        print(map(len, possiblePts1D))
+        print([len(x) for x in possiblePts1D])
         print([bestSol[ind] + x for ind, x in enumerate(boundsRoundedUp)])
 
     argGen = ArgGenIterator(possiblePts3D, SWCDatas[gridInd])
@@ -48,10 +48,10 @@ for gridInd, gridSize in enumerate(gridSizes):
 
         prevVals = [objFun((x, SWCDatas[gridInd - 1])) for x in minimzers]
         bestSol = minimzers[np.argmin(prevVals)]
-    bounds = map(lambda x: [x - gridSize, x + gridSize], bestSol)
+    bounds = [[x - gridSize, x + gridSize] for x in bestSol]
     if debugging:
         bestVal = objFun((bestSol, SWCDatas[gridInd]))
-        print(bestSol, bestVal)
+        print((bestSol, bestVal))
 
 if minRes < gridSizes[-1]:
 
@@ -62,9 +62,9 @@ if minRes < gridSizes[-1]:
     possiblePts3D = list(product(*possiblePts1D))
 
     if debugging:
-        print('StepSize:' + str(minRes))
+        print(('StepSize:' + str(minRes)))
         print(bounds)
-        print(map(len, possiblePts1D))
+        print([len(x) for x in possiblePts1D])
         print([bestSol[ind] + x for ind, x in enumerate(boundsRoundedUp)])
 
     argGen = ArgGenIterator(possiblePts3D, SWCDatas[-1])
@@ -80,7 +80,7 @@ bestVal = objFun((bestSol, SWCDatas[-1]))
 nochange = objFun(([0, 0, 0], SWCDatas[-1]))
 if debugging:
     bestVals = [objFun((bestSol, x)) for x in SWCDatas]
-    print(bestSol, bestVals)
+    print((bestSol, bestVals))
 
 done = False
 
@@ -101,8 +101,6 @@ elif bestVal == nochange:
         bestSol = [0, 0, 0]
         bestVal = nochange
 
-
-
 SWCDatas[-1].writeSolution(outFiles[0], bestSol)
 matrix = compose_matrix(translate=bestSol).tolist()
 with open(outFiles[1], 'w') as fle:

+ 2 - 2
regmaxsn/core/transforms.py

@@ -193,7 +193,7 @@ True
 
 """
 
-from __future__ import division, print_function
+
 
 import math
 
@@ -1673,7 +1673,7 @@ _AXES2TUPLE = {
     'rzxy': (1, 1, 0, 1), 'ryxy': (1, 1, 1, 1), 'ryxz': (2, 0, 0, 1),
     'rzxz': (2, 0, 1, 1), 'rxyz': (2, 1, 0, 1), 'rzyz': (2, 1, 1, 1)}
 
-_TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items())
+_TUPLE2AXES = dict((v, k) for k, v in list(_AXES2TUPLE.items()))
 
 
 def vector_norm(data, axis=None, out=None):

+ 10 - 10
regmaxsn/scripts/algorithms/RegMaxS.py

@@ -2,6 +2,7 @@ import sys
 from regmaxsn.core.iterativeRegistration import IterativeRegistration
 from regmaxsn.core.misc import parFileCheck
 import os
+import pathlib as pl
 
 
 def runRegMaxS(parFile, parNames):
@@ -9,27 +10,26 @@ def runRegMaxS(parFile, parNames):
 
     for pars in parsList:
         print('Current Parameters:')
-        for parN, parV in pars.iteritems():
-            print('{}: {}'.format(parN, parV))
+        for parN, parV in pars.items():
+            print(('{}: {}'.format(parN, parV)))
 
         resFile = pars['resFile']
         refSWC = pars['refSWC']
         testSWC = pars['testSWC']
 
-        if os.path.isfile(resFile):
+        res_filepath = pl.Path(resFile)
+        if res_filepath.is_file():
 
-            ch = raw_input('File exists: ' + resFile + '\nDelete(y/n)?')
+            ch = input('File exists: ' + resFile + '\nDelete(y/n)?')
             if ch == 'y':
-                os.remove(resFile)
+                res_filepath.unlink()
             else:
                 quit()
 
-        resDir = os.path.split(resFile)[0]
-        if not os.path.exists(resDir):
-            raise(ValueError('Could not create result file in specified directory: {}'.format(resDir)))
+        res_filepath.parent.mkdir(parents=True, exist_ok=True)
 
-        assert os.path.isfile(refSWC), 'Could  not find {}'.format(refSWC)
-        assert os.path.isfile(testSWC), 'Could  not find {}'.format(testSWC)
+        assert pl.Path(refSWC).is_file(), 'Could  not find {}'.format(refSWC)
+        assert pl.Path(testSWC).is_file(), 'Could  not find {}'.format(testSWC)
 
         iterReg = IterativeRegistration(refSWC=pars['refSWC'],
                                         gridSizes=pars['gridSizes'],

+ 13 - 13
regmaxsn/scripts/algorithms/RegMaxSN.py

@@ -52,7 +52,7 @@ def runRegMaxSN(parFile, parNames):
 
     assert os.path.isfile(parFile), "{} not found".format(parFile)
 
-    ch = raw_input('Using parameter File {}.\n Continue?(y/n)'.format(parFile))
+    ch = input('Using parameter File {}.\n Continue?(y/n)'.format(parFile))
 
     if ch != 'y':
         print('User Abort!')
@@ -68,7 +68,7 @@ def runRegMaxSN(parFile, parNames):
 
         if os.path.isdir(resDir):
 
-            ch = raw_input('Folder exists: ' + resDir + '\nDelete(y/n)?')
+            ch = input('Folder exists: ' + resDir + '\nDelete(y/n)?')
             if ch == 'y':
                 shutil.rmtree(resDir)
             else:
@@ -76,7 +76,7 @@ def runRegMaxSN(parFile, parNames):
         try:
             os.mkdir(resDir)
         except Exception as e:
-            raise(IOError('Could not create {}'.format(resDir)))
+            raise IOError('Could not create {}'.format(resDir))
 
         assert os.path.isfile(refSWC), 'Could  not find {}'.format(refSWC)
 
@@ -90,11 +90,11 @@ def runRegMaxSN(parFile, parNames):
 
     for parInd, pars in enumerate(parsList):
 
-        print('Starting Job # {}'.format(parInd + 1))
+        print(('Starting Job # {}'.format(parInd + 1)))
 
         print('Current Parameters:')
-        for parN, parV in pars.iteritems():
-            print('{}: {}'.format(parN, parV))
+        for parN, parV in pars.items():
+            print(('{}: {}'.format(parN, parV)))
 
         resDir = pars['resDir']
         refSWC = pars['initRefSWC']
@@ -138,7 +138,7 @@ def runRegMaxSN(parFile, parNames):
             for swcInd, swc in enumerate(swcList):
                 dirPath, expName = os.path.split(swc[:-4])
 
-                print('Doing Iter ' + str(iterInd) + ' : ' + expName)
+                print(('Doing Iter ' + str(iterInd) + ' : ' + expName))
 
                 SWC2Align = prevAlignedSWCs[swcInd]
 
@@ -186,10 +186,10 @@ def runRegMaxSN(parFile, parNames):
                         shutil.rmtree(outPartsDir)
                         shutil.copytree(inPartsDir, outPartsDir)
                     os.remove(resSol)
-                    print('finalVal (' + str(finalVals) + ') >= initVal (' + str(initVals) + '). Doing Nothing!')
+                    print(('finalVal (' + str(finalVals) + ') >= initVal (' + str(initVals) + '). Doing Nothing!'))
                     done = True
                 else:
-                    print('finalVal (' + str(finalVals) + ') < initVal (' + str(initVals) + '). Keeping the iteration!')
+                    print(('finalVal (' + str(finalVals) + ') < initVal (' + str(initVals) + '). Keeping the iteration!'))
                     with open(resSol, 'r') as fle:
                         pars = json.load(fle)
                         totalTrans = np.array(pars['finalTransMat'])
@@ -198,9 +198,9 @@ def runRegMaxSN(parFile, parNames):
                         scale, shear, angles, trans, persp = decompose_matrix(totalTrans)
                         nrnScaleBounds[swc] = getRemainderScale(scale, nrnScaleBounds[swc])
                 dones.append(done)
-                print('Finished ' + expName + ' : ' + str(done))
+                print(('Finished ' + expName + ' : ' + str(done)))
 
-                print('Remainder scale: ' + str(nrnScaleBounds[swc]))
+                print(('Remainder scale: ' + str(nrnScaleBounds[swc])))
                 presAlignedSWCs.append(resSWC)
 
 
@@ -234,9 +234,9 @@ def runRegMaxSN(parFile, parNames):
 
         with open(finalSolFile, 'w') as fle:
             json.dump({'finalVal': bestMeasure,
-                       'bestIteration': bestIterInd}, fle)
+                       'bestIteration': int(bestIterInd)}, fle)
 
-        print ('Finished Job # {}'.format(parInd + 1))
+        print(('Finished Job # {}'.format(parInd + 1)))
 
 
 if __name__ == '__main__':

+ 1 - 1
regmaxsn/scripts/algorithms/alignAllRot.py

@@ -38,7 +38,7 @@ for val in vals:
 
 for expInd, expName in enumerate(expNames):
     if refInd != expInd:
-        print('Doing ' + expName + ' Rot')
+        print(('Doing ' + expName + ' Rot'))
 
         SWC2Align = os.path.join(dirPath, expName + '.swc')
 

+ 1 - 1
regmaxsn/scripts/algorithms/alignAllScale.py

@@ -41,7 +41,7 @@ for val in vals:
 
 for expInd, expName in enumerate(expNames):
     if refInd != expInd:
-        print('Doing ' + expName + ' Scale')
+        print(('Doing ' + expName + ' Scale'))
 
         SWC2Align = os.path.join(dirPath, expName + '.swc')
 

+ 1 - 1
regmaxsn/scripts/algorithms/alignAllTrans.py

@@ -40,7 +40,7 @@ for val in vals:
 
 for expInd, expName in enumerate(expNames):
     if refInd != expInd:
-        print('Doing ' + expName + ' Scale')
+        print(('Doing ' + expName + ' Scale'))
 
         SWC2Align = os.path.join(dirPath, expName + '.swc')
 

+ 4 - 4
regmaxsn/scripts/algorithms/pcaBasedReg.py

@@ -9,7 +9,7 @@ import sys
 
 def pca_based(parFile):
 
-    ch = raw_input('Using parameter File {}.\n Continue?(y/n)'.format(parFile))
+    ch = input('Using parameter File {}.\n Continue?(y/n)'.format(parFile))
 
     if ch != 'y':
         print('User Abort!')
@@ -21,8 +21,8 @@ def pca_based(parFile):
     for parInd, pars in enumerate(parsList):
 
         print('Current Parameters:')
-        for parN, parV in pars.iteritems():
-            print('{}: {}'.format(parN, parV))
+        for parN, parV in pars.items():
+            print(('{}: {}'.format(parN, parV)))
 
         refSWC = pars['refSWC']
         testSWC = pars['testSWC']
@@ -88,7 +88,7 @@ def pca_based(parFile):
                                            )
 
             else:
-                print('Specified partsDir {} not found'.format(inPartsDir))
+                print(('Specified partsDir {} not found'.format(inPartsDir)))
 
         with open(resSolFile, 'w') as fle:
             json.dump({'transMat': totalTransform.tolist(), 'bestVal': bestVal,

+ 4 - 4
regmaxsn/scripts/analysis/calcD2Metrics.py

@@ -40,7 +40,7 @@ class DataSet(object):
         allMinDists = {}
         thrash, resamRefPts = resampleSWC(self.refSWC, minLen)
 
-        for testSWCSetName, testSWCSet in self.testSWCSets.iteritems():
+        for testSWCSetName, testSWCSet in self.testSWCSets.items():
 
             minDists = np.empty((resamRefPts.shape[0], len(testSWCSet)))
 
@@ -185,12 +185,12 @@ allMinDistStats = {}
 fig1, ax1 = plt.subplots(figsize=(14, 11.2))
 fig2, ax2 = plt.subplots(figsize=(14, 11.2))
 
-for dataSetName, dataSet in dataSets.iteritems():
+for dataSetName, dataSet in dataSets.items():
 
     temp = dataSet.calcMinDists(0.1)
     minDists[dataSetName] = temp
     dataSetMinDists = []
-    for methodName, testSetMinDists in temp.iteritems():
+    for methodName, testSetMinDists in temp.items():
         methodMinDistStats = pd.DataFrame(data=None,
                                           columns=['Mean of minimum distances(um)',
                                                    'Standard Deviation of \nminimum distances(um)',
@@ -204,7 +204,7 @@ for dataSetName, dataSet in dataSets.iteritems():
 
     allMinDistStats[dataSetName] = pd.concat(dataSetMinDists)
 
-minDistsStats1DF = pd.concat(allMinDistStats.itervalues())
+minDistsStats1DF = pd.concat(iter(allMinDistStats.values()))
 
 sns.boxplot(x='Group Name', y='Mean of minimum distances(um)', hue='Method', data=minDistsStats1DF,
             ax=ax1, whis='range')

+ 5 - 6
regmaxsn/scripts/analysis/compareRegPerfAA1.py

@@ -125,7 +125,7 @@ def saveData(outXLFile):
         initRef = case["initRef"]
         expNameLambdas = case['expNameLambdas']
 
-        for (resDirLabel, resDir) in resDirs.iteritems():
+        for (resDirLabel, resDir) in resDirs.items():
 
             outFiles = []
             expNameLambda = expNameLambdas[resDirLabel]
@@ -136,11 +136,11 @@ def saveData(outXLFile):
                 if os.path.isfile(outFile):
                     outFiles.append(outFile)
                 else:
-                    print("{} not found. Ignoring it.".format(outFile))
+                    print(("{} not found. Ignoring it.".format(outFile)))
 
             if outFiles:
 
-                print("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef))
+                print(("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef)))
 
                 metric = occupancyEMD(outFiles, voxelSize)
 
@@ -177,7 +177,7 @@ def saveData(outXLFile):
                 # maxDistStatsDF = maxDistStatsDF.append(tempDF, ignore_index=True)
 
             else:
-                print("No usable SWCs found in {}".format(resDir))
+                print(("No usable SWCs found in {}".format(resDir)))
 
     metricsDF.to_excel(outXLFile)
 
@@ -229,5 +229,4 @@ if __name__ == "__main__":
     elif sys.argv[1] == "plot":
         fig = plotData(sys.argv[2])
     else:
-        raise(ValueError("Improper Usage! Please use as:\n"
-                         "python {fName} save <outFile> or python {fName} plot <inFile>".format(fName=sys.argv[0])))
+        raise ValueError

+ 5 - 6
regmaxsn/scripts/analysis/compareRegPerfLLC.py

@@ -120,7 +120,7 @@ def saveData(outXLFile):
         initRef = case["initRef"]
         expNameLambdas = case['expNameLambdas']
 
-        for (resDirLabel, resDir) in resDirs.iteritems():
+        for (resDirLabel, resDir) in resDirs.items():
 
             outFiles = []
             expNameLambda = expNameLambdas[resDirLabel]
@@ -131,11 +131,11 @@ def saveData(outXLFile):
                 if os.path.isfile(outFile):
                     outFiles.append(outFile)
                 else:
-                    print("{} not found. Ignoring it.".format(outFile))
+                    print(("{} not found. Ignoring it.".format(outFile)))
 
             if outFiles:
 
-                print("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef))
+                print(("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef)))
 
                 metric = occupancyEMD(outFiles, voxelSize)
 
@@ -172,7 +172,7 @@ def saveData(outXLFile):
                 # maxDistStatsDF = maxDistStatsDF.append(tempDF, ignore_index=True)
 
             else:
-                print("No usable SWCs found in {}".format(resDir))
+                print(("No usable SWCs found in {}".format(resDir)))
 
     metricsDF.to_excel(outXLFile)
 
@@ -223,5 +223,4 @@ if __name__ == "__main__":
     elif sys.argv[1] == "plot":
         fig = plotData(sys.argv[2])
     else:
-        raise(ValueError("Improper Usage! Please use as:\n"
-                         "python {fName} save <outFile> or python {fName} plot <inFile>".format(fName=sys.argv[0])))
+        raise ValueError

+ 5 - 6
regmaxsn/scripts/analysis/compareRegPerfOMB.py

@@ -125,7 +125,7 @@ def saveData(outXLFile):
         initRef = case["initRef"]
         expNameLambdas = case['expNameLambdas']
 
-        for (resDirLabel, resDir) in resDirs.iteritems():
+        for (resDirLabel, resDir) in resDirs.items():
 
             outFiles = []
             expNameLambda = expNameLambdas[resDirLabel]
@@ -136,11 +136,11 @@ def saveData(outXLFile):
                 if os.path.isfile(outFile):
                     outFiles.append(outFile)
                 else:
-                    print("{} not found. Ignoring it.".format(outFile))
+                    print(("{} not found. Ignoring it.".format(outFile)))
 
             if outFiles:
 
-                print("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef))
+                print(("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef)))
 
                 metric = occupancyEMD(outFiles, voxelSize)
 
@@ -177,7 +177,7 @@ def saveData(outXLFile):
                 # maxDistStatsDF = maxDistStatsDF.append(tempDF, ignore_index=True)
 
             else:
-                print("No usable SWCs found in {}".format(resDir))
+                print(("No usable SWCs found in {}".format(resDir)))
 
     metricsDF.to_excel(outXLFile)
 
@@ -228,5 +228,4 @@ if __name__ == "__main__":
     elif sys.argv[1] == "plot":
         fig = plotData(sys.argv[2])
     else:
-        raise(ValueError("Improper Usage! Please use as:\n"
-                         "python {fName} save <outFile> or python {fName} plot <inFile>".format(fName=sys.argv[0])))
+        raise ValueError

+ 5 - 6
regmaxsn/scripts/analysis/compareRegPerfOPSInt.py

@@ -139,7 +139,7 @@ def saveData(outXLFile):
         initRef = case["initRef"]
         expNameLambdas = case['expNameLambdas']
 
-        for (resDirLabel, resDir) in resDirs.iteritems():
+        for (resDirLabel, resDir) in resDirs.items():
 
             outFiles = []
             expNameLambda = expNameLambdas[resDirLabel]
@@ -150,11 +150,11 @@ def saveData(outXLFile):
                 if os.path.isfile(outFile):
                     outFiles.append(outFile)
                 else:
-                    print("{} not found. Ignoring it.".format(outFile))
+                    print(("{} not found. Ignoring it.".format(outFile)))
 
             if outFiles:
 
-                print("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef))
+                print(("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef)))
 
                 metric = occupancyEMD(outFiles, voxelSize)
 
@@ -190,7 +190,7 @@ def saveData(outXLFile):
                 # maxDistStatsDF = maxDistStatsDF.append(tempDF, ignore_index=True)
 
             else:
-                print("No usable SWCs found in {}".format(resDir))
+                print(("No usable SWCs found in {}".format(resDir)))
 
 
     metricsDF.to_excel(outXLFile)
@@ -241,5 +241,4 @@ if __name__ == "__main__":
     elif sys.argv[1] == "plot":
         fig = plotData(sys.argv[2])
     else:
-        raise(ValueError("Improper Usage! Please use as:\n"
-                         "python {fName} save <outFile> or python {fName} plot <inFile>".format(fName=sys.argv[0])))
+        raise ValueError

+ 5 - 6
regmaxsn/scripts/analysis/compareRegPerfRAL.py

@@ -120,7 +120,7 @@ def saveData(outXLFile):
         initRef = case["initRef"]
         expNameLambdas = case['expNameLambdas']
 
-        for (resDirLabel, resDir) in resDirs.iteritems():
+        for (resDirLabel, resDir) in resDirs.items():
 
             outFiles = []
             expNameLambda = expNameLambdas[resDirLabel]
@@ -131,11 +131,11 @@ def saveData(outXLFile):
                 if os.path.isfile(outFile):
                     outFiles.append(outFile)
                 else:
-                    print("{} not found. Ignoring it.".format(outFile))
+                    print(("{} not found. Ignoring it.".format(outFile)))
 
             if outFiles:
 
-                print("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef))
+                print(("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef)))
 
                 metric = occupancyEMD(outFiles, voxelSize)
 
@@ -172,7 +172,7 @@ def saveData(outXLFile):
                 # maxDistStatsDF = maxDistStatsDF.append(tempDF, ignore_index=True)
 
             else:
-                print("No usable SWCs found in {}".format(resDir))
+                print(("No usable SWCs found in {}".format(resDir)))
 
     metricsDF.to_excel(outXLFile)
 
@@ -223,5 +223,4 @@ if __name__ == "__main__":
     elif sys.argv[1] == "plot":
         fig = plotData(sys.argv[2])
     else:
-        raise(ValueError("Improper Usage! Please use as:\n"
-                         "python {fName} save <outFile> or python {fName} plot <inFile>".format(fName=sys.argv[0])))
+        raise ValueError

+ 5 - 6
regmaxsn/scripts/analysis/compareRegPerfRAL_min5.py

@@ -120,7 +120,7 @@ def saveData(outXLFile):
         initRef = case["initRef"]
         expNameLambdas = case['expNameLambdas']
 
-        for (resDirLabel, resDir) in resDirs.iteritems():
+        for (resDirLabel, resDir) in resDirs.items():
 
             outFiles = []
             expNameLambda = expNameLambdas[resDirLabel]
@@ -131,11 +131,11 @@ def saveData(outXLFile):
                 if os.path.isfile(outFile):
                     outFiles.append(outFile)
                 else:
-                    print("{} not found. Ignoring it.".format(outFile))
+                    print(("{} not found. Ignoring it.".format(outFile)))
 
             if outFiles:
 
-                print("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef))
+                print(("Collecting data for resDirLabel={}, initRef={}".format(resDirLabel, initRef)))
 
                 metric = occupancyEMD(outFiles, voxelSize)
 
@@ -160,7 +160,7 @@ def saveData(outXLFile):
                 # maxDistStatsDF = maxDistStatsDF.append(tempDF, ignore_index=True)
 
             else:
-                print("No usable SWCs found in {}".format(resDir))
+                print(("No usable SWCs found in {}".format(resDir)))
 
     metricsDF.to_excel(outXLFile)
 
@@ -207,5 +207,4 @@ if __name__ == "__main__":
     elif sys.argv[1] == "plot":
         fig = plotData(sys.argv[2])
     else:
-        raise(ValueError("Improper Usage! Please use as:\n"
-                         "python {fName} save <outFile> or python {fName} plot <inFile>".format(fName=sys.argv[0])))
+        raise ValueError

+ 14 - 14
regmaxsn/scripts/analysis/pairwiseDistanceStats.py

@@ -28,7 +28,7 @@ def pairwiseDistanceStats(parFile):
         testName = resFile[:-4]
         thresh = par['gridSizes'][-1]
 
-        print('Doing ' + repr((refSWC, resFile)))
+        print(('Doing ' + repr((refSWC, resFile))))
 
         refPts = np.loadtxt(refSWC)[:, 2:5]
         testPtsFull = np.loadtxt(resFile)
@@ -37,7 +37,7 @@ def pairwiseDistanceStats(parFile):
 
         if refPts.shape[0] != testPts.shape[0]:
 
-            print('Number of points do not match for ' + refSWC + 'and' + resFile)
+            print(('Number of points do not match for ' + refSWC + 'and' + resFile))
             continue
 
         allSizes.append(refPts.shape[0])
@@ -50,7 +50,7 @@ def pairwiseDistanceStats(parFile):
                                      ignore_index=True)
 
         t, p = sign_test(ptDiff, thresh)
-        print(t, p)
+        print((t, p))
         oneSidedP = 0.5 * p
         signCloserThanSmallestVoxelSize = t < 0 and oneSidedP < 0.01
 
@@ -69,15 +69,15 @@ def pairwiseDistanceStats(parFile):
         signCloserThanSmalledVoxelSizeDF = signCloserThanSmalledVoxelSizeDF.append(tempDict,
                                                                                    ignore_index=True)
 
-    print("Jobs with "
+    print(("Jobs with "
           "pairwise distance "
           "significantly smaller "
-          "than lowest voxel size: {} out of {}".format(passCount, len(parsList)))
+          "than lowest voxel size: {} out of {}".format(passCount, len(parsList))))
 
-    print("Jobs with "
+    print(("Jobs with "
           "pairwise distance "
           "not significantly greater "
-          "than lowest voxel size: {} out of {}".format(passCountNGT, len(parsList)))
+          "than lowest voxel size: {} out of {}".format(passCountNGT, len(parsList))))
 
     allEqualSizeThresh = (allSizes.count(allSizes[0]) == len(allSizes)) and \
                          (allThreshs.count(allThreshs[0]) == len(allThreshs))
@@ -91,7 +91,7 @@ def pairwiseDistanceStats(parFile):
 
         for nodeInd, (node, nodeDistsDF) in enumerate(transErrsGBNodeID):
 
-            print("Doing node {}, {} of {}".format(node, nodeInd, len(transErrsGBNodeID.indices)))
+            print(("Doing node {}, {} of {}".format(node, nodeInd, len(transErrsGBNodeID.indices))))
             t, p = sign_test(nodeDistsDF['Pairwise Distance in $\mu$m'].astype(float), allThreshs[0])
             oneSidedP = 0.5 * p
             signCloserThanSmallestVoxelSize = t < 0 and oneSidedP < 0.01
@@ -99,20 +99,20 @@ def pairwiseDistanceStats(parFile):
             nodeWiseSignCloserThanSmallestVoxelSize.append(signCloserThanSmallestVoxelSize)
 
 
-        print("Jobs with "
+        print(("Jobs with "
               "pairwise distance "
               "significantly smaller "
-              "than lowest voxel size: {} out of {}".format(passCount, len(parsList)))
+              "than lowest voxel size: {} out of {}".format(passCount, len(parsList))))
 
-        print("Jobs with "
+        print(("Jobs with "
               "pairwise distance "
               "not significantly larger "
-              "than lowest voxel size: {} out of {}".format(sum(nodeWiseNotSignLargerThanSmallestVoxelSize), len(parsList)))
+              "than lowest voxel size: {} out of {}".format(sum(nodeWiseNotSignLargerThanSmallestVoxelSize), len(parsList))))
 
-        print("Nodes with pairwise distance "
+        print(("Nodes with pairwise distance "
           "significantly smaller "
           "than lowest voxel size: {} out of {}".format(sum(nodeWiseSignCloserThanSmallestVoxelSize),
-                                                        len(nodeWiseSignCloserThanSmallestVoxelSize)))
+                                                        len(nodeWiseSignCloserThanSmallestVoxelSize))))
 
 
 # ----------------------------------------------------------------------------------------------------------------------

+ 13 - 13
regmaxsn/scripts/analysis/pairwiseDistanceStatsNN.py

@@ -30,7 +30,7 @@ def pairwiseDistanceStats(parFile):
         testName = resFile[:-4]
         thresh = par['gridSizes'][-1]
 
-        print('Doing ' + repr((refSWC, resFile)))
+        print(('Doing ' + repr((refSWC, resFile))))
 
         refPts = np.loadtxt(refSWC)[:, 2:5]
         testPtsFull = np.loadtxt(resFile)
@@ -46,7 +46,7 @@ def pairwiseDistanceStats(parFile):
                                      ignore_index=True)
 
         t, p = sign_test(minDists, thresh)
-        print(minDists.shape)
+        print((minDists.shape))
         oneSidedP = 0.5 * p
         signCloserThanSmallestVoxelSize = t < 0 and oneSidedP < 0.01
 
@@ -65,15 +65,15 @@ def pairwiseDistanceStats(parFile):
         signCloserThanSmalledVoxelSizeDF = signCloserThanSmalledVoxelSizeDF.append(tempDict,
                                                                                    ignore_index=True)
 
-    print("Jobs with "
+    print(("Jobs with "
           "pairwise distance "
           "significantly smaller "
-          "than lowest voxel size: {} out of {}".format(passCount, len(parsList)))
+          "than lowest voxel size: {} out of {}".format(passCount, len(parsList))))
 
-    print("Jobs with "
+    print(("Jobs with "
           "pairwise distance "
           "not significantly greater "
-          "than lowest voxel size: {} out of {}".format(passCountNGT, len(parsList)))
+          "than lowest voxel size: {} out of {}".format(passCountNGT, len(parsList))))
 
     allEqualSizeThresh = (allSizes.count(allSizes[0]) == len(allSizes)) and \
                          (allThreshs.count(allThreshs[0]) == len(allThreshs))
@@ -87,7 +87,7 @@ def pairwiseDistanceStats(parFile):
 
         for nodeInd, (node, nodeDistsDF) in enumerate(transErrsGBNodeID):
 
-            print("Doing node {}, {} of {}".format(node, nodeInd, len(transErrsGBNodeID.indices)))
+            print(("Doing node {}, {} of {}".format(node, nodeInd, len(transErrsGBNodeID.indices))))
             t, p = sign_test(nodeDistsDF['Pairwise Distance in $\mu$m'].astype(float), allThreshs[0])
             oneSidedP = 0.5 * p
             signCloserThanSmallestVoxelSize = t < 0 and oneSidedP < 0.01
@@ -95,20 +95,20 @@ def pairwiseDistanceStats(parFile):
             nodeWiseSignCloserThanSmallestVoxelSize.append(signCloserThanSmallestVoxelSize)
 
 
-        print("Jobs with "
+        print(("Jobs with "
               "pairwise distance "
               "significantly smaller "
-              "than lowest voxel size: {} out of {}".format(passCount, len(parsList)))
+              "than lowest voxel size: {} out of {}".format(passCount, len(parsList))))
 
-        print("Jobs with "
+        print(("Jobs with "
               "pairwise distance "
               "not significantly larger "
-              "than lowest voxel size: {} out of {}".format(sum(nodeWiseNotSignLargerThanSmallestVoxelSize), len(parsList)))
+              "than lowest voxel size: {} out of {}".format(sum(nodeWiseNotSignLargerThanSmallestVoxelSize), len(parsList))))
 
-        print("Nodes with pairwise distance "
+        print(("Nodes with pairwise distance "
           "significantly smaller "
           "than lowest voxel size: {} out of {}".format(sum(nodeWiseSignCloserThanSmallestVoxelSize),
-                                                        len(nodeWiseSignCloserThanSmallestVoxelSize)))
+                                                        len(nodeWiseSignCloserThanSmallestVoxelSize))))
 
 
 # ----------------------------------------------------------------------------------------------------------------------

+ 20 - 20
regmaxsn/scripts/analysis/pairwiseDistanceStatsVsAnisotropicScaling.py

@@ -26,7 +26,7 @@ def pairwiseDistanceStats(parFile, anisotropicScalingThresh):
         testName = resFile[:-4]
         thresh = par['gridSizes'][-1]
 
-        print('Doing ' + repr((refSWC, resFile)))
+        print(('Doing ' + repr((refSWC, resFile))))
 
         refPts = np.loadtxt(refSWC)[:, 2:5]
         testPtsFull = np.loadtxt(resFile)
@@ -35,7 +35,7 @@ def pairwiseDistanceStats(parFile, anisotropicScalingThresh):
 
         if refPts.shape[0] != testPts.shape[0]:
 
-            print('Number of points do not match for ' + refSWC + 'and' + resFile)
+            print(('Number of points do not match for ' + refSWC + 'and' + resFile))
             continue
 
         allSizes.append(refPts.shape[0])
@@ -49,7 +49,7 @@ def pairwiseDistanceStats(parFile, anisotropicScalingThresh):
                 pars = json.load(fle)
                 scales = np.array(pars['scale'])
         else:
-            raise (IOError('File not found: {}'.format(origJSON)))
+            raise IOError
 
         scalesOrdered = np.sort(scales)
         scalesRelative = np.mean([scalesOrdered[0] / scalesOrdered[1],
@@ -93,7 +93,7 @@ def pairwiseDistanceStats(parFile, anisotropicScalingThresh):
 
         for nodeInd, (node, nodeDistsDF) in enumerate(transErrsGBNodeID):
 
-            print("Doing node {}, {} of {}".format(node, nodeInd, len(transErrsGBNodeID.indices)))
+            print(("Doing node {}, {} of {}".format(node, nodeInd, len(transErrsGBNodeID.indices))))
             nodeDistsAll = nodeDistsDF['Pairwise Distance in $\mu$m'].astype(float)
             t, p = sign_test(nodeDistsAll, allThreshs[0])
             oneSidedP = 0.5 * p
@@ -128,26 +128,26 @@ def pairwiseDistanceStats(parFile, anisotropicScalingThresh):
         nodesCloserThanCountAniso = nodeWiseStatsDF["[AnisoFiltered] Pairwise distance significantly smaller than smallest voxel size"].sum()
         nodesNotFartherThanCountAniso = nodeWiseStatsDF["[AnisoFiltered] Pairwise distance not significantly larger than smallest voxel size"].sum()
 
-        print("[All] Nodes with pairwise distance "
+        print(("[All] Nodes with pairwise distance "
               "significantly smaller "
               "than lowest voxel size: {} out of {}, {}%".format(nodesCloserThanCount,
                                                             nodeWiseStatsDF.shape[0],
-                                                                 100 * nodesCloserThanCount / nodeWiseStatsDF.shape[0]))
-        print("[All] Nodes with pairwise distance "
+                                                                 100 * nodesCloserThanCount / nodeWiseStatsDF.shape[0])))
+        print(("[All] Nodes with pairwise distance "
               "not significantly larger "
               "than lowest voxel size: {} out of {}, {}%".format(nodesNotFartherThanCount,
                                                             nodeWiseStatsDF.shape[0],
-                                                            100 * nodesNotFartherThanCount / nodeWiseStatsDF.shape[0]))
-        print("[Aniso] Nodes with pairwise distance "
+                                                            100 * nodesNotFartherThanCount / nodeWiseStatsDF.shape[0])))
+        print(("[Aniso] Nodes with pairwise distance "
               "significantly smaller "
               "than lowest voxel size: {} out of {}, {}%".format(nodesCloserThanCountAniso,
                                                             nodeWiseStatsDF.shape[0],
-                                                                 100 * nodesCloserThanCountAniso / nodeWiseStatsDF.shape[0]))
-        print("[Aniso] Nodes with pairwise distance "
+                                                                 100 * nodesCloserThanCountAniso / nodeWiseStatsDF.shape[0])))
+        print(("[Aniso] Nodes with pairwise distance "
               "not significantly larger "
               "than lowest voxel size: {} out of {}, {}%".format(nodesNotFartherThanCountAniso,
                                                             nodeWiseStatsDF.shape[0],
-                                                            100 * nodesNotFartherThanCountAniso / nodeWiseStatsDF.shape[0]))
+                                                            100 * nodesNotFartherThanCountAniso / nodeWiseStatsDF.shape[0])))
 
         signStatsAniso = signStats.loc[signStats["Anisotropic Scaling Level"] > anisotropicScalingThresh, :]
         signCloserThanAniso = signStatsAniso["Pairwise distance significantly smaller than smallest voxel size"]
@@ -156,18 +156,18 @@ def pairwiseDistanceStats(parFile, anisotropicScalingThresh):
         notSignFartherAniso = signStatsAniso["Pairwise distance not significantly larger than smallest voxel size"]
         notSignFartherCountAniso = notSignFartherAniso.sum()
 
-        print("[Aniso Filtered] Jobs with "
+        print(("[Aniso Filtered] Jobs with "
               "pairwise distance "
               "significantly smaller "
               "than lowest voxel size: {} out of {}, {}%".format(signCloserThanCountAniso,
                                                             signStatsAniso.shape[0],
-                                                            100 * signCloserThanCountAniso / signStatsAniso.shape[0]))
-        print("[Aniso Filtered] Jobs with "
+                                                            100 * signCloserThanCountAniso / signStatsAniso.shape[0])))
+        print(("[Aniso Filtered] Jobs with "
               "pairwise distance "
               "not significantly larger "
               "than lowest voxel size: {} out of {}, {}%".format(notSignFartherCountAniso,
                                                             signStatsAniso.shape[0],
-                                                                 100 * notSignFartherCountAniso / signStatsAniso.shape[0]))
+                                                                 100 * notSignFartherCountAniso / signStatsAniso.shape[0])))
 
 
     signCloserThan = signStats["Pairwise distance significantly smaller than smallest voxel size"]
@@ -176,16 +176,16 @@ def pairwiseDistanceStats(parFile, anisotropicScalingThresh):
     notSignFarther = signStats["Pairwise distance not significantly larger than smallest voxel size"]
     notSignFartherCount = notSignFarther.sum()
 
-    print("[All] Jobs with "
+    print(("[All] Jobs with "
           "pairwise distance "
           "significantly smaller "
           "than lowest voxel size: {} out of {}, {}%".format(signCloserThanCount, len(parsList),
-                                                             100 * signCloserThanCount / len(parsList)))
-    print("[All] Jobs with "
+                                                             100 * signCloserThanCount / len(parsList))))
+    print(("[All] Jobs with "
           "pairwise distance "
           "not significantly larger "
           "than lowest voxel size: {} out of {}, {}%".format(notSignFartherCount, len(parsList),
-                                                             100 * notSignFartherCount / len(parsList)))
+                                                             100 * notSignFartherCount / len(parsList))))
 
 
 # ----------------------------------------------------------------------------------------------------------------------

+ 5 - 5
regmaxsn/scripts/analysis/plotCHBasedMetricVsAverageRotScaleTransform.py

@@ -33,7 +33,7 @@ figs = []
 def saveData():
     for suffixInd, suffix in enumerate(suffixes):
 
-        print("Doing {}".format(labels[suffixInd]))
+        print(("Doing {}".format(labels[suffixInd])))
 
         parSWCDict = {}
 
@@ -48,17 +48,17 @@ def saveData():
             jsonPars = transJSON[jsonKey]
             parSWCDict[jsonPars[1]] = outFile
 
-        allPars = parSWCDict.keys()
+        allPars = list(parSWCDict.keys())
         allParsSorted = np.sort(allPars)
 
         maxStepSize = int(np.floor(float(N) / float(swcSetSize)))
         baseSet = np.arange(0, swcSetSize)
         for stepSize in range(1, maxStepSize + 1):
-            print("Doing StepSize {}/{}".format(stepSize, maxStepSize))
+            print(("Doing StepSize {}/{}".format(stepSize, maxStepSize)))
             windowSlideSize = int(stepSize * swcSetSize / 2)
-            windowStarts = range(0, N - stepSize * swcSetSize + 1, windowSlideSize)
+            windowStarts = list(range(0, N - stepSize * swcSetSize + 1, windowSlideSize))
             for windowStart in windowStarts:
-                print("Doing Window start {}/{}".format(windowStart, windowStarts))
+                print(("Doing Window start {}/{}".format(windowStart, windowStarts)))
                 pars = allParsSorted[windowStart + stepSize * baseSet]
                 swcFiles = [parSWCDict[par] for par in pars]
                 metric = maxDistEMD(swcFiles)

+ 2 - 2
regmaxsn/scripts/analysis/plotCHDBasedMeasureVsIterations.py

@@ -29,7 +29,7 @@ def plotMaxDistEMDVsIteration(parFile, parNames):
         centroidAlignedSWCs = [os.path.join(resDir, '{}{}.swc'.format(expName, 0)) for expName in expNames]
 
         for iterInd in range(nIter + 1):
-            print('Doing {}/{}'.format(iterInd + 1, nIter + 1))
+            print(('Doing {}/{}'.format(iterInd + 1, nIter + 1)))
             iterSWCs = [os.path.join(resDir, '{}{}.swc'.format(expName, iterInd)) for expName in expNames]
 
             metric = maxDistEMD(iterSWCs, centroidAlignedSWCs)
@@ -54,7 +54,7 @@ if __name__ == '__main__':
 
     parFile = sys.argv[1]
     figs = plotMaxDistEMDVsIteration(parFile, RegMaxSNParNames)
-    raw_input('Press any key to close figures and quit:')
+    input('Press any key to close figures and quit:')
 
 
 

+ 5 - 5
regmaxsn/scripts/analysis/plotOccupancyBasedMeasureVsAverageRotScaleTransform.py

@@ -34,7 +34,7 @@ figs = []
 def saveData():
     for suffixInd, suffix in enumerate(suffixes):
 
-        print("Doing {}".format(labels[suffixInd]))
+        print(("Doing {}".format(labels[suffixInd])))
 
         parSWCDict = {}
 
@@ -49,17 +49,17 @@ def saveData():
             jsonPars = transJSON[jsonKey]
             parSWCDict[jsonPars[1]] = outFile
 
-        allPars = parSWCDict.keys()
+        allPars = list(parSWCDict.keys())
         allParsSorted = np.sort(allPars)
 
         maxStepSize = int(np.floor(float(N) / float(swcSetSize)))
         baseSet = np.arange(0, swcSetSize)
         for stepSize in range(1, maxStepSize + 1):
-            print("Doing StepSize {}/{}".format(stepSize, maxStepSize))
+            print(("Doing StepSize {}/{}".format(stepSize, maxStepSize)))
             windowSlideSize = int(stepSize * swcSetSize / 2)
-            windowStarts = range(0, N - stepSize * swcSetSize + 1, windowSlideSize)
+            windowStarts = list(range(0, N - stepSize * swcSetSize + 1, windowSlideSize))
             for windowStart in windowStarts:
-                print("Doing Window start {}/{}".format(windowStart, windowStarts))
+                print(("Doing Window start {}/{}".format(windowStart, windowStarts)))
                 pars = allParsSorted[windowStart + stepSize * baseSet]
                 swcFiles = [parSWCDict[par] for par in pars]
                 metric = occupancyEMD(swcFiles, voxelSize)

+ 4 - 4
regmaxsn/scripts/analysis/plotOccupancyMeasureVsIterations.py

@@ -27,7 +27,7 @@ def plotMaxDistEMDVsIteration(parFile, parNames):
         expNames = [os.path.split(swc)[1][:-4] for swc in swcList]
 
         if os.path.isdir(resDir):
-            print('Current refSWC={}'.format(pars["initRefSWC"]))
+            print(('Current refSWC={}'.format(pars["initRefSWC"])))
 
             iters = sorted([int(fle[3:-4]) for fle in os.listdir(resDir) if fle.find('ref') == 0])
             nIter = max(iters)
@@ -35,14 +35,14 @@ def plotMaxDistEMDVsIteration(parFile, parNames):
             # distributionsDF = pd.DataFrame()
 
             for iterInd in range(nIter + 1):
-                print('Doing {}/{}'.format(iterInd + 1, nIter + 1))
+                print(('Doing {}/{}'.format(iterInd + 1, nIter + 1)))
                 iterSWCs = [os.path.join(resDir, '{}{}.swc'.format(expName, iterInd)) for expName in expNames]
 
                 for gridSize in gridSizes:
                     metric = occupancyEMD(iterSWCs, gridSize)
                     occupancyDist = calcOccupancyDistribution(iterSWCs, gridSize)
                     tempDict = {}
-                    for k, v in occupancyDist.iteritems():
+                    for k, v in occupancyDist.items():
                         tempDict["Occupancy"] = k
                         tempDict["Occupancy PMF"] = v
                         tempDict["gridSize"] = gridSize
@@ -89,7 +89,7 @@ if __name__ == '__main__':
 
     parFile = sys.argv[1]
     figs = plotMaxDistEMDVsIteration(parFile, RegMaxSNParNames)
-    raw_input('Press any key to close figures and quit:')
+    input('Press any key to close figures and quit:')
 
 
 

+ 5 - 5
regmaxsn/scripts/analysis/plotPairwiseDistance.py

@@ -28,14 +28,14 @@ def plotPairwiseDistances(parFile):
         testName = resFile[:-4]
         thresh = par['gridSizes'][-1]
 
-        print('Doing ' + repr((refSWC, resFile)))
+        print(('Doing ' + repr((refSWC, resFile))))
 
         refPts = np.loadtxt(refSWC)[:, 2:5]
         testPts = np.loadtxt(resFile)[:, 2:5]
 
         if refPts.shape[0] != testPts.shape[0]:
 
-            print('Number of points do not match for ' + refSWC + 'and' + resFile)
+            print(('Number of points do not match for ' + refSWC + 'and' + resFile))
             continue
 
         ptDiff = np.linalg.norm(refPts - testPts, axis=1)
@@ -56,7 +56,7 @@ def plotPairwiseDistances(parFile):
 
         sns.boxplot(x='Exp. Name', y='Pairwise Distance in $\mu$m',
                     ax=ax, data=transErrs, whis='range', color=sns.color_palette()[0])
-        ax1.plot(range(regErrs.size), regErrs['\% of points closer than\n lowest grid size'],
+        ax1.plot(list(range(regErrs.size)), regErrs['\% of points closer than\n lowest grid size'],
                  color=sns.color_palette()[0], marker='o', linestyle='-', ms=10)
 
     ax.set_xlim(-1, len(regErrs))
@@ -68,7 +68,7 @@ def plotPairwiseDistances(parFile):
 
     ax1.set_xlim(-1, len(regErrs))
     ax1.set_ylim(-10, 110)
-    ax1.set_xticks(range(regErrs.size))
+    ax1.set_xticks(list(range(regErrs.size)))
     ax1.set_xticklabels(['par{}'.format(x) for x in range(len(parsList))], rotation=90)
     ax1.set_ylabel('\% of points closer than\n lowest grid size')
 
@@ -90,5 +90,5 @@ if __name__ == '__main__':
 
     parFile = sys.argv[1]
     figs = plotPairwiseDistances(parFile)
-    raw_input('Press any key to close figures and quit:')
+    input('Press any key to close figures and quit:')
 

+ 4 - 4
regmaxsn/scripts/analysis/plotPairwiseDistanceNN.py

@@ -39,7 +39,7 @@ def plotPairwiseDistancesNN(parFile):
         testName = resFile[:-4]
         thresh = par['gridSizes'][-1]
 
-        print('Doing ' + repr((refSWC, resFile)))
+        print(('Doing ' + repr((refSWC, resFile))))
 
         refPts = np.loadtxt(refSWC)[:, 2:5]
         testPts = np.loadtxt(resFile)[:, 2:5]
@@ -64,7 +64,7 @@ def plotPairwiseDistancesNN(parFile):
 
         sns.boxplot(x='Exp. Name', y='Pairwise Distance in $\mu$m',
                     ax=ax, data=transErrs, color=sns.color_palette()[0], whis='range')
-        ax1.plot(range(regErrs.size), regErrs['\% of points closer than\n lowest grid size'],
+        ax1.plot(list(range(regErrs.size)), regErrs['\% of points closer than\n lowest grid size'],
                  color=sns.color_palette()[0], marker='o', linestyle='-', ms=10)
 
     ax.set_xlim(-1, len(regErrs))
@@ -74,7 +74,7 @@ def plotPairwiseDistancesNN(parFile):
 
     ax1.set_xlim(-1, len(regErrs))
     ax1.set_ylim(-10, 110)
-    ax1.set_xticks(range(regErrs.size))
+    ax1.set_xticks(list(range(regErrs.size)))
     ax1.set_xticklabels(['job {}'.format(x) for x in range(len(parsList))], rotation=90)
     ax1.set_ylabel('\% of points closer than\n lowest grid size')
 
@@ -96,7 +96,7 @@ if __name__ == '__main__':
 
     parFile = sys.argv[1]
     figs = plotPairwiseDistancesNN(parFile)
-    raw_input('Press any key to close figures and quit:')
+    input('Press any key to close figures and quit:')
 
 
 

+ 7 - 6
regmaxsn/scripts/analysis/plotReg-MaxS-NIOUTraj.py

@@ -5,6 +5,7 @@ from matplotlib import pyplot as plt
 from regmaxsn.core.matplotlibRCParams import mplPars
 import seaborn as sns
 import sys
+from functools import reduce
 
 
 def plotIOUTraj(parFile, parNames):
@@ -35,14 +36,14 @@ def plotIOUTraj(parFile, parNames):
         figs.append(fig)
 
         for gridInd, gridSize in enumerate(gridSizes):
-            print('Doing gridSize = {} of {}'.format(gridSize, gridSizes))
+            print(('Doing gridSize = {} of {}'.format(gridSize, gridSizes)))
             nInts = []
             nUnions = []
             nIOUs = []
 
             for iterInd in range(nIter + 1):
 
-                print('Doing {}/{}'.format(iterInd + 1, nIter + 1))
+                print(('Doing {}/{}'.format(iterInd + 1, nIter + 1)))
                 alignedSWCs = [os.path.join(resDir, '{}{}.swc'.format(expName, iterInd)) for expName in expNames]
 
                 indVoxs = []
@@ -66,17 +67,17 @@ def plotIOUTraj(parFile, parNames):
                 nIOUs.append(nIOU)
 
             with sns.axes_style('darkgrid'):
-                ax0.plot(range(nIter + 1), nInts, color=cols[gridInd], marker='o', ls='-', label=str(gridSize))
+                ax0.plot(list(range(nIter + 1)), nInts, color=cols[gridInd], marker='o', ls='-', label=str(gridSize))
                 ax0.set_xlim(-1, nIter + 2)
                 ax0.set_xlabel('Iteration Number')
                 ax0.set_ylabel('n(Intersection) (nI)')
 
-                ax1.plot(range(nIter + 1), nUnions, color=cols[gridInd], marker='o', ls='-', label=str(gridSize))
+                ax1.plot(list(range(nIter + 1)), nUnions, color=cols[gridInd], marker='o', ls='-', label=str(gridSize))
                 ax1.set_xlim(-1, nIter + 2)
                 ax1.set_xlabel('Iteration Number')
                 ax1.set_ylabel('n(Union) (nU)')
 
-                ax2.plot(range(nIter + 1), nIOUs, color=cols[gridInd], marker='o', ls='-', label=str(gridSize))
+                ax2.plot(list(range(nIter + 1)), nIOUs, color=cols[gridInd], marker='o', ls='-', label=str(gridSize))
                 ax2.set_xlim(-1, nIter + 2)
                 ax2.set_xlabel('Iteration Number')
                 ax2.set_ylabel('1 - nI / nU')
@@ -96,4 +97,4 @@ if __name__ == '__main__':
 
     parFile = sys.argv[1]
     figs = plotIOUTraj(parFile, RegMaxSNParNames)
-    raw_input('Press any key to close figures and quit:')
+    input('Press any key to close figures and quit:')

+ 5 - 5
regmaxsn/scripts/analysis/plotRegMaxSPerfVsNoise.py

@@ -26,7 +26,7 @@ def regmaxsPerfVsNoise(parFile, anisoThresh):
         testSWC = par["testSWC"]
 
         if testSWC.find("NoiseStd") < 0:
-            print("{} has testSWC {} without noise.Ignoring it!".format(parFile, testSWC))
+            print("{} has testSWC {} without noise. Ignoring it!".format(parFile, testSWC))
         else:
 
             NoiseStdStrInd = testSWC.find("NoiseStd")
@@ -41,7 +41,7 @@ def regmaxsPerfVsNoise(parFile, anisoThresh):
                     pars = json.load(fle)
                     scales = np.array(pars['scale'])
             else:
-                raise (IOError('File not found: {}'.format(origJSON)))
+                raise IOError('File not found: {}'.format(origJSON))
 
             scalesOrdered = np.sort(scales)
             scalesRelative = np.mean([scalesOrdered[0] / scalesOrdered[1],
@@ -52,7 +52,7 @@ def regmaxsPerfVsNoise(parFile, anisoThresh):
             testName = resFile[:-4]
             thresh = par['gridSizes'][-1]
 
-            print('Doing ' + repr((refSWC, resFile)))
+            print(('Doing ' + repr((refSWC, resFile))))
 
             refPts = np.loadtxt(refSWC)[:, 2:5]
             testPtsFull = np.loadtxt(resFile)
@@ -61,7 +61,7 @@ def regmaxsPerfVsNoise(parFile, anisoThresh):
 
             if refPts.shape[0] != testPts.shape[0]:
 
-                print('Number of points do not match for ' + refSWC + 'and' + resFile)
+                print(('Number of points do not match for ' + refSWC + 'and' + resFile))
                 continue
 
 
@@ -121,4 +121,4 @@ if __name__ == '__main__':
     parFile = sys.argv[1]
     anisoThesh = float(sys.argv[2])
     fig = regmaxsPerfVsNoise(parFile, anisoThesh)
-    raw_input("Press any key to close the figure and exit....")
+    input("Press any key to close the figure and exit....")

+ 5 - 5
regmaxsn/scripts/analysis/regErrorVsAnisotropicscaling.py

@@ -37,7 +37,7 @@ def regErrorVsAIScaling(parFile, colFunc=None):
         resFile = par['resFile']
         thresh = par['gridSizes'][-1]
 
-        print('Doing ' + repr((refSWC, resFile)))
+        print(('Doing ' + repr((refSWC, resFile))))
 
         origJSON = testSWC[:-4] + '.json'
 
@@ -46,7 +46,7 @@ def regErrorVsAIScaling(parFile, colFunc=None):
                 pars = json.load(fle)
                 scales = np.array(pars['scale'])
         else:
-            raise(IOError('File not found: {}'.format(origJSON)))
+            raise IOError('File not found: {}'.format(origJSON))
 
         scalesOrdered = np.sort(scales)
         scalesRelative = np.mean([scalesOrdered[0] / scalesOrdered[1],
@@ -58,7 +58,7 @@ def regErrorVsAIScaling(parFile, colFunc=None):
 
         if refPts.shape[0] != testPts.shape[0]:
 
-            print('Number of points do not match for ' + refSWC + 'and' + testSWC)
+            print(('Number of points do not match for ' + refSWC + 'and' + testSWC))
             continue
 
 
@@ -79,7 +79,7 @@ def regErrorVsAIScaling(parFile, colFunc=None):
 
                 ax.plot(vals[1], vals[0], color=col, marker='o', ls='None', ms=10)
             except Exception as e:
-                raise(Exception('Problem with plotting. There could be a problem with argument colFunc'))
+                raise Exception('Problem with plotting. There could be a problem with argument colFunc') from e
 
     ax.set_xlabel('measure of anisotropic scaling')
     ax.set_ylabel('\% points closer than \nthe lowest grid size')
@@ -96,4 +96,4 @@ if __name__ == '__main__':
 
     parFile = sys.argv[1]
     fig = regErrorVsAIScaling(parFile, colFunc)
-    raw_input('Press any key to close figures and quit:')
+    input('Press any key to close figures and quit:')

+ 3 - 7
regmaxsn/scripts/analysis/saveAverageDensity.py

@@ -37,7 +37,7 @@ def saveAverageDensity(regMaxSParFile, refSWC, outFile, gridUnitSize, sigma, ref
         for swcFile in swcFiles:
 
             data = np.loadtxt(swcFile)
-            mask = map(lambda ptInd: ptInd not in data[:, 6], data[:, 0])
+            mask = [ptInd not in data[:, 6] for ptInd in data[:, 0]]
             masks.append(mask)
 
         densityViz = DensityVizualizations(swcFiles, gridUnitSize, resampleLen, masks=masks,
@@ -85,7 +85,7 @@ def savePlotsTogether(densityDir, outDir):
 
         label = os.path.split(comFile)[1][:-4]
 
-        print("Doing {}".format(label))
+        print(("Doing {}".format(label)))
         compressedData = np.load(comFile)
         density = compressedData['density']
         bins = compressedData['bins']
@@ -238,11 +238,7 @@ if __name__ == "__main__":
         outDir = sys.argv[3]
         savePlotsTogether(densityDir, outDir)
     else:
-        raise(ValueError("Improper Usage! Please use as:\n"
-                         "python {fle} saveData <RegMaxSParFile> <outFile> "
-                         "<spatial discretization size> <Gaussian smoothing sigma>\n"
-                         "python {fle} savePlotsSingle <compressed Data file> <label> <output directory>\n"
-                         "python {fle} savePlotsTogether <density directory> <output directory>".format(fle=sys.argv[0])))
+        raise ValueError("Improper Usage! Please use as:\n"
+                         "python {fle} saveData <RegMaxSParFile> <outFile> "
+                         "<spatial discretization size> <Gaussian smoothing sigma>\n"
+                         "python {fle} savePlotsSingle <compressed Data file> <label> <output directory>\n"
+                         "python {fle} savePlotsTogether <density directory> <output directory>".format(fle=sys.argv[0]))
 
 
 

+ 1 - 1
regmaxsn/scripts/analysis/viz2DOccupancy.py

@@ -33,7 +33,7 @@ for swc in swcFiles:
 
 voxelCounter = Counter(voxels)
 df = pd.DataFrame()
-for voxel, count in voxelCounter.iteritems():
+for voxel, count in voxelCounter.items():
     tempDict = {"X": voxel[0], "Y": voxel[1], "count": count}
     df = df.append(tempDict, ignore_index=True)
 

+ 1 - 1
regmaxsn/scripts/utils/addRandomNoise.py

@@ -26,7 +26,7 @@ expNames = [
               ]
 
 outPath = dirPath
-noiseStds = range(1, 6)
+noiseStds = list(range(1, 6))
 # ----------------------------------------------------------------------------------------------------------------------
 
 baseSWCs = [os.path.join(dirPath, expName + '.swc') for expName in expNames]

+ 2 - 1
regmaxsn/scripts/utils/constructRegMaxSNParFile.py

@@ -21,11 +21,11 @@ from numpy import pi, deg2rad
 import os
 import json
 from regmaxsn.core.RegMaxSPars import RegMaxSNParNames
+import pathlib as pl
 
 temp = os.path.split(os.path.abspath(__file__))[0]
 temp1 = os.path.split(temp)[0]
 
-
 # **********************************************************************************************************************
 
 # Default parameters
@@ -171,5 +171,6 @@ pars = [{k: ns[k] for k in RegMaxSNParNames}]
 # **********************************************************************************************************************
 
 # write the parameters into the parameter file.
pl.Path(parFile).parent.mkdir(parents=True, exist_ok=True)
 with open(parFile, 'w') as fle:
     json.dump(pars, fle)

+ 14 - 14
regmaxsn/scripts/utils/constructRegMaxSParFile.py

@@ -19,17 +19,17 @@ from numpy import pi, deg2rad
 import os
 import json
 from regmaxsn.core.RegMaxSPars import RegMaxSParNames
+import pathlib as pl
 
 # obtaining the directory path containing the folder containing this file
 temp = os.path.split(os.path.abspath(__file__))[0]
 temp1 = os.path.split(temp)[0]
 
-
 # **********************************************************************************************************************
 
 # Default parameters
 # distances in um, angles in radians
-gridSizes = [80.0, 40.0, 20.0, 10.0]
+gridSizes = [40.0, 20.0, 10.0]
 transBounds = [[-30, 30], [-30, 30], [-30, 30]]
 transMinRes = 1
 rotBounds = [[-pi / 6, pi / 6], [-pi / 6, pi / 6], [-pi / 6, pi / 6]]
@@ -67,10 +67,10 @@ nCPU = 6
 #
 # -------------------------------------------
 # # Example 1
-# refSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNG.swc')
-# testSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNGRandTrans1.swc')
-# resFile = os.path.join(temp1, 'Results', 'Tests', 'HSN-fluoro01.CNGRandTrans1.swc')
-# parFile = os.path.join(temp1, 'ParFiles', 'Reg-MaxS', 'HSN-fluoro01.CNGRandTrans1.json')
+refSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNG.swc')
+testSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNGRandTrans1.swc')
+resFile = os.path.join(temp1, 'Results', 'Tests', 'HSN-fluoro01.CNGRandTrans1.swc')
+parFile = os.path.join(temp1, 'ParFiles', 'Reg-MaxS', 'HSN-fluoro01.CNGRandTrans1.json')
 # -------------------------------------------
 # # Example 2
 # refSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNG.swc')
@@ -79,10 +79,10 @@ nCPU = 6
 # parFile = os.path.join(temp1, 'ParFiles', 'Reg-MaxS', 'HSN-fluoro01.CNGRandRot0.json')
 # -------------------------------------------
 # # Example 3
-refSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNG.swc')
-testSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNGRandTranslate0.swc')
-resFile = os.path.join(temp1, 'Results', 'Tests', 'HSN-fluoro01.CNGRandTranslate0.swc')
-parFile = os.path.join(temp1, 'ParFiles', 'Reg-MaxS', 'HSN-fluoro01.CNGRandTranslate0.json')
+# refSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNG.swc')
+# testSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNGRandTranslate0.swc')
+# resFile = os.path.join(temp1, 'Results', 'Tests', 'HSN-fluoro01.CNGRandTranslate0.swc')
+# parFile = os.path.join(temp1, 'ParFiles', 'Reg-MaxS', 'HSN-fluoro01.CNGRandTranslate0.json')
 # -------------------------------------------
 # # Example 4
 # refSWC = os.path.join(temp1, 'TestFiles', 'HSN-fluoro01.CNG.swc')
@@ -93,13 +93,13 @@ parFile = os.path.join(temp1, 'ParFiles', 'Reg-MaxS', 'HSN-fluoro01.CNGRandTrans
 # Example 5
 # refSWC = os.path.join(temp1, 'TestFiles', 'LLC', 'Gad1-F-000062.CNG.swc')
 # testSWC = os.path.join(temp1, 'TestFiles', 'LLC', 'Cha-F-400051.CNG.swc')
-# resFile = os.path.join(temp1, 'Results', 'Tests', 'LLC', 'Cha-F-400051.CNG.swc')
+# resFile = os.path.join(temp1, 'Results', 'Reg-MaxS', 'LLC', 'Cha-F-400051.CNG.swc')
 # parFile = os.path.join(temp1, 'ParFiles', 'Reg-MaxS', 'LLC1.json')
 # inPartsDir = os.path.join(temp1, 'TestFiles', 'LLC', 'Cha-F-400051.CNG')
 # outPartsDir = os.path.join(temp1, 'Results', 'Tests', 'LLC', 'Cha-F-400051.CNG')
 #
 # -------------------------------------------
-# obtains the list of variables in the current work space
+# # obtains the list of variables in the current work space
 ns = vars()
 # forms the dictionary of parameters to be saved into the parameter file.
 pars = [{k: ns[k] for k in RegMaxSParNames}]
@@ -130,7 +130,6 @@ pars = [{k: ns[k] for k in RegMaxSParNames}]
 #             'HSN-fluoro01.CNGRandTrans7.swc',
 #             'HSN-fluoro01.CNGRandTrans8.swc',
 #             'HSN-fluoro01.CNGRandTrans9.swc',
-#
 #             # 'HSN-fluoro01.CNGNoiseStd1RandTrans.swc',
 #             # 'HSN-fluoro01.CNGNoiseStd2RandTrans.swc',
 #             # 'HSN-fluoro01.CNGNoiseStd3RandTrans.swc',
@@ -152,9 +151,10 @@ pars = [{k: ns[k] for k in RegMaxSParNames}]
 #     ns = vars()
 #     # forms the dictionary of parameters to be saved into the parameter file.
 #     pars.append({k: ns[k] for k in RegMaxSParNames})
-# -------------------------------------------
+# # -------------------------------------------
 # **********************************************************************************************************************
 
 # write the parameters into the parameter file.
+pl.Path(parFile).parent.mkdir(parents=True, exist_ok=True)
 with open(parFile, 'w') as fle:
     json.dump(pars, fle)

+ 1 - 1
regmaxsn/scripts/utils/correctReg-MaxS-N_finalChoice.py

@@ -34,7 +34,7 @@ def getRegMaxSNIterVsMeasure(resDir, swcList, voxelSize):
 def correctRegMaxSNChoice(parFile, parNames):
     assert os.path.isfile(parFile), "{} not found".format(parFile)
 
-    ch = raw_input('Using parameter File {}.\n Continue?(y/n)'.format(parFile))
+    ch = input('Using parameter File {}.\n Continue?(y/n)'.format(parFile))
 
     if ch != 'y':
         print('User Abort!')

+ 1 - 1
setup.py

@@ -18,5 +18,5 @@ setup(
                           "tifffile>=0.11.1",
                           "pyemd>=0.4.4",
                           "statsmodels>=0.8"],
-        python_requires=">=2.7",
+        python_requires=">=3.7",
     )

+ 7 - 6
setupWorkspace.py

@@ -9,9 +9,10 @@ try:
     import regmaxsn
     utilsDir = os.path.join(os.path.abspath(regmaxsn.__path__[0]), 'scripts', 'utils')
 except ImportError as e:
-    raise(ImportError('The package regmaxsn must be installed before this script can be used.'))
-whereToCreate = raw_input("Enter where the workspace must be created (using {} "
-                          "if nothing is specifed):".format(homeFolder))
+    raise ImportError('The package regmaxsn must be installed before this script can be used.')
+whereToCreate = input(
+    "Enter where the workspace must be created (using {} "
+    "if nothing is specifed):".format(homeFolder))
 if whereToCreate == "":
     whereToCreate = homeFolder
 assert os.path.exists(whereToCreate), "Specified path {} does not exist".format(whereToCreate)
@@ -26,14 +27,14 @@ assert os.path.isdir(pkgParFilesDir) and os.path.isdir(pkgTestFilesDir), "Folder
 workSpace = os.path.join(whereToCreate, 'RegMaxSN_WorkSpace')
 try:
     if os.path.exists(workSpace):
-        ch = raw_input('A RegMaxSN Workspace already exists. Delete it and all files in it and create new one?(y/n):')
+        ch = input('A RegMaxSN Workspace already exists. Delete it and all files in it and create new one?(y/n):')
         if ch == "y":
             shutil.rmtree(workSpace)
         else:
             sys.exit('User Abort')
     os.mkdir(workSpace)
 except IOError as e:
-    raise(IOError('Error writing into {}. Please make sure its writable'.format(workSpace)))
+    raise IOError('Error writing into {}. Please make sure its writable'.format(workSpace))
 
 parFilesDir = os.path.join(workSpace, "ParFiles")
 shutil.copytree(pkgParFilesDir, parFilesDir)
@@ -49,7 +50,7 @@ os.mkdir(os.path.join(resDir, "Reg-MaxS"))
 os.mkdir(os.path.join(resDir, "Reg-MaxS-N"))
 os.mkdir(os.path.join(resDir, "PCABased"))
 
-print("Succesfullly created Work Space at {}".format(workSpace))
+print("Successfully created Work Space at {}".format(workSpace))
 
 
 

+ 2 - 0
tests/maxDistancesBasedMetric_test.py

@@ -1,6 +1,7 @@
 from regmaxsn.core.maxDistanceBasedMetric import calcMaxDistances, maxDistEMD, cdist_1d_centripetal
 import numpy as np
 
+
 def calcMaxDistances_test():
     """Testing the calculation of maximum distances"""
 
@@ -31,5 +32,6 @@ def cdist_1d_test():
     assert np.allclose(temp,
                        np.array([[0, -1], [1, 0], [0, -1]]))
 
+
 if __name__ == "__main__":
     maxDistEMD_test()