editParticles.py
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 16 12:43:00 2019

@author: brandt
"""

"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018  Lars Bittrich and Josef Brandt, Leibniz-Institut für 
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>    

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program, see COPYING.  
If not, see <https://www.gnu.org/licenses/>.
"""
import numpy as np
import cv2
#import matplotlib.pyplot as plt

class ParticleEditor(object):
    def __init__(self, parent):
        self.parent = parent    #the assigned analysis widget

    def createSafetyBackup(self):
        self.parent.parent.dataset.saveBackup()

    def combineParticles(self, contourIndices, new_assignment):
        contourIndices = sorted(contourIndices)   #we want to keep the contour with the lowest index
        print('selected contours:', contourIndices)
        self.createSafetyBackup()
        #get contours:
        contours = [self.parent.parent.dataset.particlecontours[i] for i in contourIndices]
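        #each contour is an (N, 1, 2) array of (x, y) pixel coordinates (OpenCV format);
        #stacking them yields one combined point cloud of all selected particles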
        cnt = np.vstack(tuple(contours))  #combine the contours
        
        #draw contours
        xmin, xmax = cnt[:,0,:][:, 0].min(), cnt[:,0,:][:, 0].max()
        ymin, ymax = cnt[:,0,:][:, 1].min(), cnt[:,0,:][:, 1].max()        
        
        padding = 2    #pixels of padding in each direction
        rangex = int(np.round((xmax-xmin)+2*padding))
        rangey = int(np.round((ymax-ymin)+2*padding))

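        #shift all points into the local (cropped) coordinate frame, so that the
        #temporary mask image only has to cover the padded bounding box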
        for i in range(len(cnt)):
            cnt[i][0][0] -= xmin-padding
            cnt[i][0][1] -= ymin-padding
            
        img = np.zeros((rangey, rangex))
        cv2.drawContours(img, [cnt], 0, 1, -1)
        cv2.drawContours(img, [cnt], 0, 1, 1)
        img = np.uint8(cv2.morphologyEx(img, cv2.MORPH_CLOSE, np.ones((3, 3))))
        
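        #the morphological closing bridges pixel-wide gaps between the drawn regions,
        #so that findContours with RETR_EXTERNAL ideally returns a single outer contour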
        contours, hierarchy = cv2.findContours(img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[-2:]   #works with both the OpenCV 3.x and 4.x return signatures

        newContour = contours[0]
        stats = self.characterizeParticle(newContour)

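        #shift the merged contour back into the global image coordinates of the dataset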
        for i in range(len(newContour)):
            newContour[i][0][0] += xmin-padding
            newContour[i][0][1] += ymin-padding
        
        
        #check whether the dataset already contains a (possibly modified) particles2spectra list, otherwise create the default one.
        if self.parent.parent.dataset.particles2spectra is None:  #create default assignment
            print('recreating particles2spectra from within edit particles...')
            sortindices = self.parent.parent.dataset.ramanscansortindex
            self.parent.parent.dataset.particles2spectra = [[int(np.where(sortindices == i)[0])] for i in range(len(sortindices))]
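            #particles2spectra maps each particle index to the list of spectrum indices
            #acquired on that particle (exactly one spectrum per particle in the
            #default assignment created above)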
        
        
        #The contour indices match the indices of the original particlestats stored in the dataset.
        #These have to be modified here and reloaded in the analysis view afterwards.
        #First, overwrite the entry at the lowest selected index with the new particle stats:
        self.parent.parent.dataset.particlestats[contourIndices[0]] = stats
        
        #now, delete the rest...
        self.parent.parent.dataset.particlestats = [i for ind, i in enumerate(self.parent.parent.dataset.particlestats) if ind not in contourIndices[1:]]
        
        #same with the contours
        self.parent.parent.dataset.particlecontours[contourIndices[0]] = newContour
        self.parent.parent.dataset.particlecontours = [i for ind, i in enumerate(self.parent.parent.dataset.particlecontours) if ind not in contourIndices[1:]]
        
        
        #update the particles2spectra list:
        #collect the spectrum indices of all selected particles
        specIndices = []
        for index in contourIndices:
            specIndices.append(self.parent.particles2spectra[index])
        
        #flatten the index list and remove duplicates (in case a nested list was created)
        specIndices = list(np.unique(np.array(specIndices)))
        for i in specIndices:
            self.parent.spectraResults[i] = new_assignment
            self.parent.hqis[i] = 100   #prevent the hqi filter from sorting these spectra out again
            print(f'spectrum {i} of particle {contourIndices[0]} is now {new_assignment}')
        
        
        #modify particles2spectra: the merged particle keeps all spectra, the other entries are removed
        self.parent.parent.dataset.particles2spectra[contourIndices[0]] = specIndices
        for index in reversed(contourIndices[1:]):
            print('removing index from particles2spectra:', index)
            del self.parent.parent.dataset.particles2spectra[index]

        #save dataset
        self.parent.parent.dataset.save()        
        
        #update contours in sampleview
        self.parent.parent.contouritem.resetContours(self.parent.parent.dataset.particlecontours)
        
        self.parent.loadParticleData()
       
    
    def reassignParticles(self, contourindices, new_assignment):
        self.createSafetyBackup()
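        #assign the new polymer type to every spectrum of each selected particle and
        #reset its hqi, so that the hqi filter does not sort it out again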
        for partIndex in contourindices:
            for specIndex in self.parent.particles2spectra[partIndex]:
                self.parent.currentPolymers[specIndex] = new_assignment
                self.parent.spectraResults[specIndex] = new_assignment
                self.parent.hqis[specIndex] = 100

        self.parent.createHistogramData()
            
    
    def deleteParticles(self):
        self.createSafetyBackup()
        pass
    
    def splitParticles(self):
        self.createSafetyBackup()
        pass
    
    def characterizeParticle(self, contours):
        ##characterize particle
        longellipse, shortellipse = np.nan, np.nan
        
        cnt = contours
        
        if cnt.shape[0] >= 5:       ##at least 5 points required for ellipse fitting...
            ellipse = cv2.fitEllipse(cnt)
            shortellipse, longellipse = ellipse[1]

        rect = cv2.minAreaRect(cnt)
        long, short = rect[1]
        if short>long:
            long, short = short, long
    
        return long, short, longellipse, shortellipse, cv2.contourArea(cnt)
    
    
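#Minimal standalone sketch: characterizeParticle only relies on the passed contour,
#so it can be exercised with a synthetic elliptical contour; no analysis widget is
#needed for this, hence parent is passed as None.
if __name__ == '__main__':
    editor = ParticleEditor(None)
    #synthetic ellipse contour: centre (50, 50), half-axes 20 px and 10 px, sampled every 5 degrees
    testContour = cv2.ellipse2Poly((50, 50), (20, 10), 0, 0, 360, 5)
    testContour = testContour.reshape(-1, 1, 2)   #bring into the (N, 1, 2) contour format
    long, short, longellipse, shortellipse, area = editor.characterizeParticle(testContour)
    print('minAreaRect sides (long, short):', long, short)
    print('fitted ellipse axes (long, short):', longellipse, shortellipse)
    print('contour area:', area)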