# -*- coding: utf-8 -*-
"""
Created on Wed Jan 16 12:43:00 2019

@author: brandt
"""

"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018 Lars Bittrich and Josef Brandt, Leibniz-Institut für
Polymerforschung Dresden e. V.

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program, see COPYING.
If not, see <https://www.gnu.org/licenses/>.
"""

import numpy as np
import cv2


class ParticleEditor(object):
    """Editing operations (combine/reassign/delete/split) on the detected
    particles of the dataset shown in the parent analysis widget.

    Every destructive operation first writes a safety backup of the dataset.
    """

    def __init__(self, parent):
        # parent is the assigned analysis widget; the dataset is reached
        # via parent.parent.dataset throughout this class.
        self.parent = parent

    def createSafetyBackup(self):
        """Save a backup of the dataset before any destructive edit."""
        self.parent.parent.dataset.saveBackup()

    def combineParticles(self, contourIndices, new_assignment):
        """Merge the particles at the given contour indices into one particle.

        The merged particle keeps the lowest selected index; stats, contours
        and spectra mappings of the other selected particles are removed.
        Every spectrum of the merged particle is re-assigned to
        *new_assignment* with an HQI of 100 (so the hqi-filter keeps it).
        """
        contourIndices = sorted(contourIndices)  # we want to keep the contour with lowest index
        print('selected contours:', contourIndices)
        self.createSafetyBackup()

        dataset = self.parent.parent.dataset

        # get and stack the selected contours:
        contours = [dataset.particlecontours[i] for i in contourIndices]
        cnt = np.vstack(tuple(contours))  # combine contours

        # Draw the combined point set into a small padded image so that a
        # single enclosing contour can be re-extracted from it.
        xmin, xmax = cnt[:, 0, 0].min(), cnt[:, 0, 0].max()
        ymin, ymax = cnt[:, 0, 1].min(), cnt[:, 0, 1].max()

        padding = 2  # pixels in each direction
        rangex = int(np.round((xmax - xmin) + 2*padding))
        rangey = int(np.round((ymax - ymin) + 2*padding))

        # shift all points into the local image frame (vectorized)
        cnt[:, 0, 0] -= xmin - padding
        cnt[:, 0, 1] -= ymin - padding

        img = np.zeros((rangey, rangex))
        cv2.drawContours(img, [cnt], 0, 1, -1)  # filled interior
        cv2.drawContours(img, [cnt], 0, 1, 1)   # plus a 1-px outline
        # close small gaps between the original contours
        img = np.uint8(cv2.morphologyEx(img, cv2.MORPH_CLOSE, np.ones((3, 3))))
        # cv2.findContours returns 3 values in OpenCV 3.x but only 2 in
        # OpenCV 2.x/4.x; taking the last two keeps this version-proof.
        contours, hierarchy = cv2.findContours(img, cv2.RETR_EXTERNAL,
                                               cv2.CHAIN_APPROX_NONE)[-2:]
        newContour = contours[0]
        stats = self.characterizeParticle(newContour)

        # shift the merged contour back into dataset coordinates
        newContour[:, 0, 0] += xmin - padding
        newContour[:, 0, 1] += ymin - padding

        # check, if dataset contains (already modified) particles2spectra,
        # otherwise create the default assignment.
        if dataset.particles2spectra is None:
            print('recreating particles2spectra from within edit particles...')
            sortindices = dataset.ramanscansortindex
            dataset.particles2spectra = [[int(np.where(sortindices == i)[0])]
                                         for i in range(len(sortindices))]

        # Contour indices are the same as the original particlestats, which
        # are contained in the dataset. We have to modify that and reload in
        # the analysisview.
        # First, overwrite the first index with the new particlestats...
        dataset.particlestats[contourIndices[0]] = stats
        # ...now, delete the rest.
        dataset.particlestats = [s for ind, s in enumerate(dataset.particlestats)
                                 if ind not in contourIndices[1:]]

        # same with the contours
        dataset.particlecontours[contourIndices[0]] = newContour
        dataset.particlecontours = [c for ind, c in enumerate(dataset.particlecontours)
                                    if ind not in contourIndices[1:]]

        # collect the spectra indices of all merged particles
        # NOTE(review): this reads self.parent.particles2spectra while the
        # mapping above is maintained on self.parent.parent.dataset — confirm
        # both refer to the same underlying list.
        specIndices = []
        for index in contourIndices:
            specIndices.append(self.parent.particles2spectra[index])
        # flatten index list (in case a nested list was created) and dedupe
        specIndices = list(np.unique(np.array(specIndices)))

        for i in specIndices:
            self.parent.spectraResults[i] = new_assignment
            self.parent.hqis[i] = 100  # avoid sorting them out again by hqi-filter...
            print(f'spectrum {i} of particle{contourIndices[0]} is now {new_assignment}')

        # modify particles2spectra..
        dataset.particles2spectra[contourIndices[0]] = specIndices
        # delete from the back so earlier indices stay valid
        for index in reversed(contourIndices[1:]):
            print('removing index from particles2spectra:', index)
            del dataset.particles2spectra[index]

        # save dataset
        dataset.save()

        # update contours in sampleview
        self.parent.parent.contouritem.resetContours(dataset.particlecontours)
        self.parent.loadParticleData()

    def reassignParticles(self, contourindices, new_assignment):
        """Assign *new_assignment* to every spectrum of the given particles
        and set their HQI to 100 so the hqi-filter keeps them."""
        self.createSafetyBackup()
        for partIndex in contourindices:
            for specIndex in self.parent.particles2spectra[partIndex]:
                self.parent.currentPolymers[specIndex] = new_assignment
                self.parent.spectraResults[specIndex] = new_assignment
                self.parent.hqis[specIndex] = 100
        self.parent.createHistogramData()

    def deleteParticles(self):
        """Not yet implemented; only creates a safety backup."""
        self.createSafetyBackup()

    def splitParticles(self):
        """Not yet implemented; only creates a safety backup."""
        self.createSafetyBackup()

    def characterizeParticle(self, contours):
        """Return (long, short, longellipse, shortellipse, area) of a contour.

        Long/short are the sorted side lengths of the minimum-area rectangle;
        the ellipse axes are NaN when the contour has fewer than 5 points
        (cv2.fitEllipse requires at least 5).
        """
        longellipse, shortellipse = np.nan, np.nan
        cnt = contours
        if cnt.shape[0] >= 5:  # at least 5 points required for ellipse fitting...
            ellipse = cv2.fitEllipse(cnt)
            shortellipse, longellipse = ellipse[1]
        rect = cv2.minAreaRect(cnt)
        long, short = rect[1]
        if short > long:
            long, short = short, long
        return long, short, longellipse, shortellipse, cv2.contourArea(cnt)