Commit 690e2849 authored by JosefBrandt

Hotfix in sure_fg determination

Further performance improvements
Fixing an error when cancelling detection (now returning None only twice)
parent d9b6effc
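A minimal sketch (not part of this commit, all names illustrative) of the cancellation contract the fix settles on: the early exit returns the same number of values (two) as the presumed two-value success path, so callers can unpack the result without errors.

    def run_detection(cancelled=False):
        # early exit on cancellation: two Nones, matching the normal two-value return
        # (before the fix this spot returned three Nones)
        if cancelled:
            return None, None
        contours, measurements = ["contour0"], [1.0]
        return contours, measurements

    contours, measurements = run_detection(cancelled=True)
    if contours is None:
        print("detection cancelled")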
@@ -153,14 +153,14 @@ class Segmentation(object):
print("adaptive Histogram Adjustment")
if self.cancelcomputation:
return None, None, None
return None, None
if self.activateContrastCurve:
xi, arr = self.calculateHistFunction(self.contrastCurve)
gray = arr[gray]
print("contrast curve")
if self.cancelcomputation:
return None, None, None
return None, None
# return even if inactive!
if return_step=="activateContrastCurve": return gray, 0
@@ -168,10 +168,12 @@ class Segmentation(object):
# image blur for noise-reduction
blur = cv2.medianBlur(gray, self.blurRadius)
blur = np.uint8(blur*(255/blur.max()))
del gray
if return_step=="blurRadius": return blur, 0
print("blur")
if self.cancelcomputation:
return None, None, None
return None, None
# thresholding
if self.activateLowThresh and not self.activateUpThresh:
@@ -181,7 +183,7 @@ class Segmentation(object):
if return_step=="lowThresh": return thresh, 0
print("lower threshold")
if self.cancelcomputation:
return None, None, None
return None, None
elif self.activateLowThresh and self.activateUpThresh:
lowerLimit, upperLimit = np.round(self.lowThresh*255), np.round(self.upThresh*255)
@@ -192,7 +194,7 @@ class Segmentation(object):
if return_step=="lowThresh" or return_step=="upThresh": return thresh, 0
print("between threshold")
if self.cancelcomputation:
return None, None, None
return None, None
elif not self.activateLowThresh and self.activateUpThresh:
thresh = np.zeros_like(blur)
@@ -202,7 +204,7 @@ class Segmentation(object):
if return_step=="upThresh": return thresh, 0
print("upper threshold")
if self.cancelcomputation:
return None, None, None
return None, None
else: #no checkbox checked
if self.parent is not None:
self.parent.raiseWarning('No thresholding method selected!\nAborted detection..')
@@ -211,7 +213,8 @@ class Segmentation(object):
# close holes darker than self.maxholebrightness
thresh = self.closeBrightHoles(thresh, blur, self.maxholebrightness)
print("closed holes")
del blur
print("thresholded")
# modify thresh with seedpoints and deletepoints
for p in np.int32(seedpoints):
@@ -221,7 +224,7 @@ class Segmentation(object):
if return_step=='maxholebrightness': return thresh, 0
if self.cancelcomputation:
return None, None, None
return None, None
if self.enableMaxArea:
maxArea = self.maxparticlearea
@@ -255,7 +258,7 @@ class Segmentation(object):
width = stats[label, cv2.CC_STAT_WIDTH]
height = stats[label, cv2.CC_STAT_HEIGHT]
subthresh = np.uint8(255 * (labels[up:(up+height), left:(left+width)] == label))
subdist = cv2.distanceTransform(subthresh, cv2.DIST_L2,3)
subdist = cv2.distanceTransform(subthresh, cv2.DIST_L2, 3)
sure_fg = self.getSureForeground(subthresh, subdist, self.minparticledistance)
sure_bg = cv2.dilate(subthresh, np.ones((5, 5)), iterations = 1)
@@ -271,7 +274,7 @@ class Segmentation(object):
cv2.circle(sure_bg, tuple([p[0]-left, p[1]-up]), int(p[2]), 0, -1)
if self.cancelcomputation:
return None, None, None
return None, None
if return_step=="sure_fg":
preview_surefg = self.addToPreviewImage(sure_fg, up, left, preview_surefg)
@@ -288,7 +291,7 @@ class Segmentation(object):
markers = watershed(-subdist, markers, mask=sure_bg, compactness = self.compactness, watershed_line = True) #labels = 0 for background, 1... for particles
if self.cancelcomputation:
return None, None, None
return None, None
if return_step=="watershed":
previewImage = self.addToPreviewImage(markers, up, left, previewImage)
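For orientation, an illustrative standalone sketch (not taken from the diff) of the per-component watershed split performed above, assuming OpenCV and scikit-image as the surrounding code does; the marker construction between sure_fg and the watershed call is an assumption, since those lines are collapsed in this view.

    import cv2
    import numpy as np
    from skimage.segmentation import watershed  # skimage.morphology.watershed in older releases

    def split_particle(subthresh, sure_fg, compactness=0.0):
        # subthresh, sure_fg: uint8 masks of one connected component (nonzero = foreground)
        subdist = cv2.distanceTransform(subthresh, cv2.DIST_L2, 3)
        sure_bg = cv2.dilate(subthresh, np.ones((5, 5), np.uint8), iterations=1)
        # every sure-foreground blob becomes its own seed label; 0 marks unlabeled area
        _, markers = cv2.connectedComponents(sure_fg)
        # flood from the seeds over the inverted distance transform, confined to sure_bg
        labels = watershed(-subdist, markers, mask=sure_bg,
                           compactness=compactness, watershed_line=True)
        return labels  # 0 = background / watershed line, 1... = split particles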
@@ -299,7 +302,7 @@ class Segmentation(object):
else:
temp, contours, hierarchy = cv2.findContours(markers, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
if self.cancelcomputation:
return None, None, None
return None, None
tmpcontours = [contours[i] for i in range(len(contours)) if hierarchy[0,i,3]<0]
@@ -465,45 +468,74 @@ class Segmentation(object):
If desired, fuzzy clustering is applied to these to reduce the number of considered seed points.
:return:
"""
def simplifyByFuzzyClustering(points):
def simplifyByFuzzyClustering(points, maxNumPoints=100, maxNumClusters=50):
"""
Runs fuzzy-c-means clustering on the points to reduce the number of seed points
:return:
"""
newPoints = []
numPeaks = len(points)
if len(points) <= maxNumPoints:
xpts = [peak[1] for peak in points]
ypts = [peak[0] for peak in points]
alldata = np.vstack((ypts, xpts))
fpcs = []
cntrs = []
for ncenters in range(2, numPeaks):
maxNumClusters = min([maxNumClusters, numPeaks])
for ncenters in range(2, maxNumClusters):
cntr, u, u0, d, jm, p, fpc = fuzz.cluster.cmeans(alldata, ncenters, 2, error=0.005, maxiter=1000, init=None)
fpcs.append(fpc/(ncenters**0.3)) #makes larger cluster numbers less preferred
cntrs.append(cntr)
bestMatchIndex = fpcs.index(max(fpcs))
bestMatchCentres = cntrs[bestMatchIndex]
newPoints = []
for point in bestMatchCentres:
newPoints.append([int(round(point[0])), int(round(point[1]))])
print(f'reduced {numPeaks} to {len(newPoints)} maxima')
else:
newPoints = points
return newPoints
def sortOutTooClosePoints(points, minDistance):
"""
The points array is walked point by point, and a point is only kept if it is more than minDistance away from the last kept point.
This removes directly adjacent points.
:return:
"""
lastPoint = points[0]
fewerPoints = [points[0]]
for point in points[2:]:
if np.linalg.norm(lastPoint-point) > minDistance:
fewerPoints.append(point)
lastPoint = point
return fewerPoints
sure_fg = np.zeros_like(thresh)
submax = np.where(disttransform == disttransform.max())
maxPoints = []
for index in range(len(submax[0])):
y = submax[0][index]
x = submax[1][index]
maxPoints.append([y, x])
localMax = np.uint8(peak_local_max(disttransform, mindistance, exclude_border=False, indices = False))
localMax[disttransform == np.max(disttransform)] = 1
maxPoints = np.where(localMax == np.max(localMax))
maxPoints = np.transpose(np.array(maxPoints))
if len(maxPoints) > 3:
maxPoints = sortOutTooClosePoints(maxPoints, mindistance)
localMaxima = np.uint8(peak_local_max(disttransform, mindistance, indices = True))
for locMax in localMaxima:
maxPoints.append(locMax)
if len(maxPoints) > 3 and self.fuzzycluster:
clusteredPoints = simplifyByFuzzyClustering(maxPoints)
atLeastOnePointAdded = False
for point in clusteredPoints:
if thresh[point[0], point[1]] != 0:
sure_fg[point[0], point[1]] = 1
atLeastOnePointAdded = True
numPeaks = len(maxPoints)
if numPeaks > 3 and self.fuzzycluster:
maxPoints = simplifyByFuzzyClustering(maxPoints)
if not atLeastOnePointAdded:
point = maxPoints[0]
sure_fg[point[0], point[1]] = 1
for peak in maxPoints:
sure_fg[peak[0], peak[1]] = 1
else:
for point in maxPoints:
sure_fg[point[0], point[1]] = 1
sure_fg = cv2.dilate(sure_fg, np.ones((3, 3)))
return sure_fg
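A hedged, self-contained sketch of the seed-point flow of getSureForeground as shown in the hunk above: local maxima of the distance transform, removal of directly adjacent maxima, and optional fuzzy-c-means reduction via scikit-fuzzy. Parameter values and the exact peak_local_max behaviour (returning peak coordinates) are assumptions, not taken verbatim from the repository.

    import numpy as np
    import cv2
    import skfuzzy as fuzz
    from skimage.feature import peak_local_max

    def sure_foreground(thresh, disttransform, min_distance, max_clusters=50):
        sure_fg = np.zeros_like(thresh)
        # candidate seeds: local maxima of the distance transform
        peaks = peak_local_max(disttransform, min_distance=min_distance, exclude_border=False)
        if len(peaks) > 3:
            # drop maxima that sit closer than min_distance to the previously kept one
            kept = [peaks[0]]
            for p in peaks[1:]:
                if np.linalg.norm(p - kept[-1]) > min_distance:
                    kept.append(p)
            peaks = np.array(kept)
        if len(peaks) > 3:
            # fuzzy-c-means over the peak coordinates; the fuzzy partition coefficient
            # is mildly penalised for larger cluster counts before picking the best fit
            data = np.vstack((peaks[:, 0], peaks[:, 1])).astype(float)
            scores, centres = [], []
            for n in range(2, min(max_clusters, len(peaks))):
                cntr, u, u0, d, jm, p, fpc = fuzz.cluster.cmeans(
                    data, n, 2, error=0.005, maxiter=1000, init=None)
                scores.append(fpc / n ** 0.3)
                centres.append(cntr)
            peaks = np.round(centres[int(np.argmax(scores))]).astype(int)
        for y, x in peaks:
            sure_fg[y, x] = 1
        return cv2.dilate(sure_fg, np.ones((3, 3), np.uint8))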