# -*- coding: utf-8 -*-
"""
Created on Wed Jan 16 12:43:00 2019

@author: brandt
"""

"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018  Lars Bittrich and Josef Brandt, Leibniz-Institut für 
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>    

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program, see COPYING.  
If not, see <https://www.gnu.org/licenses/>.
"""
import numpy as np
import cv2
from PyQt5 import QtWidgets

class ParticleEditor(object):
    """Editing operations (merge, reassign, delete, split) on the detected
    particles of a GEPARD dataset.

    The editor works on the dataset wrapped by *datastats* and keeps the
    particle statistics, the particle contours and the particle->spectra
    mapping in sync. Destructive actions trigger periodic safety backups.
    """

    def __init__(self, datastats, parent):
        """
        :param datastats: data-statistics object wrapping the dataset
        :param parent: the assigned analysis widget
        """
        self.datastats = datastats
        self.parent = parent    #the assigned analysis widget
        self.backupFreq = 3     #save a backup every n actions
        self.neverBackedUp = True   #forces a backup on the very first action
        self.actionCounter = 0      #actions since the last backup

    def createSafetyBackup(self):
        """Save a dataset backup on the first destructive action and then
        after every ``backupFreq`` further actions."""
        self.actionCounter += 1
        # FIX: was ``self.actionCounter == self.backupFreq-1``, which backed up
        # every (n-1)-th action, contradicting the documented "every n actions";
        # ``>=`` also guards against a missed equality.
        if self.actionCounter >= self.backupFreq or self.neverBackedUp:
            backupname = self.datastats.dataset.saveBackup()
            print('backing up as', backupname)
            self.neverBackedUp = False
            self.actionCounter = 0

    def getNewEntry(self):
        """Prompt the user for a custom assignment name.

        :return: the entered text, or None if the dialog was cancelled or
            the input was empty
        """
        text, okClicked = QtWidgets.QInputDialog.getText(self.parent.parent, "Custom assignment", "Enter new assignment")
        if okClicked and text != '':
            return text

    def combineParticles(self, contourIndices, new_assignment):
        """Merge several particles into a single one with *new_assignment*.

        :param contourIndices: indices of the particles (contours) to merge
        :param new_assignment: assignment name; 'other' prompts the user
        """
        if new_assignment == 'other':
            new_assignment = self.getNewEntry()
        if new_assignment is None:  #user cancelled the dialog
            return

        contourIndices = sorted(contourIndices)   #we want to keep the contour with lowest index
        print('merging contours:', contourIndices)
        self.createSafetyBackup()
        #get contours:
        contours = [self.datastats.dataset.particlecontours[i] for i in contourIndices]
        cnt = np.vstack(tuple(contours))  #combine contours

        #draw contours into a small padded image around their common bounding box
        xmin, xmax = cnt[:,0,:][:, 0].min(), cnt[:,0,:][:, 0].max()
        ymin, ymax = cnt[:,0,:][:, 1].min(), cnt[:,0,:][:, 1].max()

        padding = 2    #pixel in each direction
        rangex = int(np.round((xmax-xmin)+2*padding))
        rangey = int(np.round((ymax-ymin)+2*padding))

        img = np.zeros((rangey, rangex))
        for i in contourIndices:
            curCnt = self.datastats.dataset.particlecontours[i]
            #shift contour points into image coordinates
            #(inner index renamed from ``i`` to stop shadowing the outer loop variable)
            for j in range(len(curCnt)):
                curCnt[j][0][0] -= xmin-padding
                curCnt[j][0][1] -= ymin-padding

            cv2.drawContours(img, [curCnt], -1, 1, -1)  #filled
            cv2.drawContours(img, [curCnt], -1, 1, 1)   #outline

        #close small gaps between the merged contours
        img = np.uint8(cv2.morphologyEx(img, cv2.MORPH_CLOSE, np.ones((3, 3))))

        #cv2.findContours dropped its first return value in newer OpenCV versions.
        #NOTE(review): lexicographic string comparison ('10.0' < '3.5') — works
        #for current versions, but a parsed version tuple would be safer; confirm.
        if cv2.__version__ > '3.5':
            contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
        else:
            temp, contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)

        newContour = contours[0]
        stats = self.characterizeParticle(newContour)

        #shift merged contour back into dataset coordinates
        for i in range(len(newContour)):
            newContour[i][0][0] += xmin-padding
            newContour[i][0][1] += ymin-padding

        #check, if dataset contains (already modified) particle2spectra, otherwise create new.
        if self.datastats.dataset.particles2spectra is None:  #create default assignment
            print('recreating particles2spectra from within edit particles...')
            sortindices = self.datastats.dataset.ramanscansortindex
            self.datastats.dataset.particles2spectra = [[int(np.where(sortindices == i)[0])] for i in range(len(sortindices))]

        #Contour indices are the same as the original particlestats, which are contained in the dataset.
        #We have to modify that and reload in the analysisview
        #first, overwrite first index with new particlestats
        self.datastats.dataset.particlestats[contourIndices[0]] = stats

        #now, delete the rest...
        self.datastats.dataset.particlestats = [i for ind, i in enumerate(self.datastats.dataset.particlestats) if ind not in contourIndices[1:]]

        #same with the contours
        self.datastats.dataset.particlecontours[contourIndices[0]] = newContour
        self.datastats.dataset.particlecontours = [i for ind, i in enumerate(self.datastats.dataset.particlecontours) if ind not in contourIndices[1:]]

        #update particle2spectra_list
        #collect the spectra indices of all merged particles
        specIndices = []
        for index in contourIndices:
            #NOTE(review): reads datastats.particles2spectra while the code above
            #writes datastats.dataset.particles2spectra — presumably datastats
            #mirrors the dataset attribute; verify against the datastats class.
            specIndices.append(self.datastats.particles2spectra[index])

        #flatten index list (in case, that a nested list was created...)
        specIndices = list(np.concatenate(specIndices))
        for i in specIndices:
            self.datastats.spectraResults[i] = new_assignment
            self.datastats.hqis[i] = 100   #avoid sorting them out again by hqi-filter...
            print(f'spectrum {i} of particle{contourIndices[0]} is now {new_assignment}')

        #modify particles2spectra..
        self.datastats.dataset.particles2spectra[contourIndices[0]] = specIndices
        #delete from the back so earlier indices stay valid
        for index in reversed(contourIndices[1:]):
            print('removing index from particles2spectra:', index)
            del self.datastats.dataset.particles2spectra[index]

        #save data
        self.datastats.saveAnalysisResults()

        #update contours in sampleview
        self.parent.parent.contouritem.resetContours(self.datastats.dataset.particlecontours)
        self.parent.loadParticleData()

    def reassignParticles(self, contourindices, new_assignment):
        """Assign *new_assignment* to every spectrum of the given particles.

        :param contourindices: indices of the particles to reassign
        :param new_assignment: assignment name; 'other' prompts the user
        """
        if new_assignment == 'other':
            new_assignment = self.getNewEntry()
        if new_assignment is None:  #user cancelled the dialog
            return

        self.createSafetyBackup()
        print(f'reassigning indices {contourindices} into {new_assignment}')
        for partIndex in contourindices:
            for specIndex in self.datastats.particles2spectra[partIndex]:
                self.datastats.currentPolymers[specIndex] = new_assignment
                self.datastats.spectraResults[specIndex] = new_assignment
                self.datastats.hqis[specIndex] = 100   #avoid sorting them out again by hqi-filter...

        #save data
        self.datastats.saveAnalysisResults()

        self.parent.loadParticleData()

    def deleteParticles(self):
        #TODO: not yet implemented; backup is already taken so the stub is safe to extend
        self.createSafetyBackup()
        pass

    def splitParticles(self):
        #TODO: not yet implemented; backup is already taken so the stub is safe to extend
        self.createSafetyBackup()
        pass

    def characterizeParticle(self, contours):
        """Compute size descriptors of a single particle contour.

        :param contours: one OpenCV contour (array of shape (N, 1, 2))
        :return: tuple (long, short, longellipse, shortellipse, area); the
            ellipse axes are NaN when the contour has fewer than 5 points
        """
        longellipse, shortellipse = np.nan, np.nan

        cnt = contours

        if cnt.shape[0] >= 5:       ##at least 5 points required for ellipse fitting...
            ellipse = cv2.fitEllipse(cnt)
            shortellipse, longellipse = ellipse[1]

        rect = cv2.minAreaRect(cnt)
        long, short = rect[1]
        if short > long:    #ensure long >= short
            long, short = short, long

        return long, short, longellipse, shortellipse, cv2.contourArea(cnt)