#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018  Lars Bittrich and Josef Brandt, Leibniz-Institut für 
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>    

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program, see COPYING.  
If not, see <https://www.gnu.org/licenses/>.
"""
import numpy as np
import cv2
import os

from .helperfunctions import cv2imread_fix, cv2imwrite_fix
from .analysis.particleContainer import ParticleContainer
from .analysis import particleCharacterization as pc
from .errors import InvalidParticleError
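
# datasets carry a version number; legacyConversion() upgrades them one step
# at a time (0 -> 1 -> 2 -> 3 -> 4), so a dataset of any older version passes
# through every intermediate conversion until it matches currentVersion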

currentVersion = 4

def legacyConversion(dset, recreatefullimage=False):
    if dset.version==0:
        print("Converting legacy version 0 to 1")
        print("This may take some time")
        
        # local imports as these functions are only needed for the rare occasion of legacy conversion
        from .opticalscan import loadAndPasteImage
        
        # try to load png and check for detection contours
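        # (older program versions drew the detection contours in pure green,
        # BGR (0, 255, 0), into the saved overview image; if the pixels sampled
        # along one contour are all pure green, the saved image contains that
        # overlay and has to be recreated from the raw grid data)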
        recreatefullimage = recreatefullimage or not os.path.exists(dset.getLegacyImageName())
        if not recreatefullimage:
            img = cv2imread_fix(dset.getLegacyImageName())
            Nc = len(dset.particlecontours)
            if Nc>0:
                contour = dset.particlecontours[Nc//2]
                contpixels = img[contour[:,0,1],contour[:,0,0]]
                if np.all(contpixels[:,1]==255) and np.all(contpixels[:,2]==0) \
                    and np.all(contpixels[:,0]==0):
                    recreatefullimage = True
            if not recreatefullimage:
                cv2imwrite_fix(dset.getImageName(), img)
            del img
        
        if recreatefullimage:
            print("recreating fullimage from grid data")
            imgdata = None
            zvalimg = None
            Ngrid = len(dset.grid)
            
            width, height, rotationvalue = dset.imagedim_df
            p0, p1 = dset.maxdim[:2], dset.maxdim[2:]
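            # stitch the overview image back together tile by tile, the same
            # way the original optical scan did, rebuilding the z-value image
            # along the way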
            for i in range(Ngrid):
                print(f"Processing image {i+1} of {Ngrid}")
                names = []
                for k in range(len(dset.zpositions)):
                    names.append(os.path.join(dset.getScanPath(), f"image_{i}_{k}.bmp"))
                p = dset.grid[i]
                imgdata, zvalimg = loadAndPasteImage(names, imgdata, zvalimg, width, 
                                                        height, rotationvalue, p0, p1, p)
            dset.zvalimg = zvalimg
            cv2imwrite_fix(dset.getImageName(), cv2.cvtColor(imgdata, cv2.COLOR_RGB2BGR))
            del imgdata
        dset.saveZvalImg()
        if "particleimgs" in dset.__dict__:
            del dset.particleimgs
        
        dset.version = 1
        
    if dset.version == 1:
        print("Converting legacy version 1 to 2")
        if hasattr(dset, 'pixelscale'):
            print('pixelscale was', dset.pixelscale)
            dset.pixelscale_bf = dset.pixelscale
            dset.pixelscale_df = dset.pixelscale
            del dset.pixelscale
        
        if hasattr(dset, 'imagedim'):
            dset.imagedim_bf = dset.imagedim
            dset.imagedim_df = dset.imagedim
            del dset.imagedim
        
        if not hasattr(dset, 'particles2spectra'):
            # assume a one-to-one mapping: the single scan index of particle i
            # is the position of i in ramanscansortindex
            dset.particles2spectra = [[int(np.where(dset.ramanscansortindex == i)[0][0])] for i in range(len(dset.ramanscansortindex))]
        
        dset.version = 2
    
    if dset.version == 2:
        print("Converting legacy version 2 to 3")
        transferParticleStatsToParticleContainer(dset)
        dset.version = 3
        
    if dset.version == 3:
        print("Converting legacy version 3 to 4")
        updateParticleStats(dset)
        removeLegacyAttributes(dset)
        dset.version = 4
        
# add conversions for later version numbers here
        

def transferParticleStatsToParticleContainer(dset):
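    # version 2 -> 3: move the old flat particlestats/particlecontours arrays
    # into a ParticleContainer and link each particle to its measurements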
    dset.particleContainer = ParticleContainer(dset)
    dset.particleContainer.initializeParticles(len(dset.particlestats))
    dset.particleContainer.setParticleContours(dset.particlecontours)
    assert len(dset.particleContainer.particles) == len(dset.particlestats)
    
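    # particlestats columns: 0/1 = long/short size of the bounding box,
    # 2/3 = long/short size of the fitted ellipse, 4 = area; all in pixel
    # units here, converted via applyPixelScaleToParticleStats below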
    for index, particle in enumerate(dset.particleContainer.particles):
        particle.longSize_box = float(dset.particlestats[index][0])
        particle.shortSize_box = float(dset.particlestats[index][1])
        particle.longSize_ellipse = float(dset.particlestats[index][2])
        particle.shortSize_ellipse = float(dset.particlestats[index][3])
        particle.area = float(dset.particlestats[index][4])
    
    dset.particleContainer.applyPixelScaleToParticleStats(dset.getPixelScale())
    
    dset.particleContainer.clearMeasurements()
    
    if not hasattr(dset, 'particles2spectra') or dset.particles2spectra is None:
        dset.particles2spectra = [[int(np.where(dset.ramanscansortindex == i)[0][0])] for i in range(len(dset.ramanscansortindex))]
    
    if len(dset.particlestats) > 0:   #i.e., particle detection was completed and particle data is there 
        dset.particleDetectionDone = True
        for particleIndex, listOfScanIndices in enumerate(dset.particles2spectra):
            curParticle = dset.particleContainer.getParticleOfIndex(particleIndex)
            curParticle.measurements = []
            for scanIndex in listOfScanIndices:
                indexOfNewMeas = dset.particleContainer.addEmptyMeasurement()
                indexOfMeasInRamanPoints = dset.ramanscansortindex[scanIndex]
                x, y = dset.ramanpoints[indexOfMeasInRamanPoints][0], dset.ramanpoints[indexOfMeasInRamanPoints][1]
                dset.particleContainer.setMeasurementPixelCoords(indexOfNewMeas, x, y)
                dset.particleContainer.setMeasurementScanIndex(indexOfNewMeas, scanIndex)
                curParticle.addMeasurement(dset.particleContainer.measurements[indexOfNewMeas])
  
    if hasattr(dset, 'results') and dset.results['polymers'] is not None:   # transfer assignments and hit quality indices (HQIs) from the old flat results dict
        for particle in dset.particleContainer.particles:
            for meas in particle.measurements:
                specIndex = meas.getScanIndex()
                meas.setAssignment(dset.results['polymers'][specIndex])
                meas.setHQI(dset.results['hqis'][specIndex])
    
    dset.particleContainer.testForInconsistentParticleAssignments()
    
def updateParticleStats(dset):
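    # version 3 -> 4: re-derive per-particle properties (contour, color,
    # height, shape, fibre dimensions) from the stitched overview and z-value
    # images; particles failing any step are collected in deleteIndices and
    # removed at the end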
    def markForDeletion(particle):
        print('index, size, assignments', particle.index, particle.getParticleSize(), particle.getParticleAssignment())
        print('particle is invalid and will be deleted!')
        deleteIndices.append(particle.index)
    
    fullimage = pc.loadFullimageFromDataset(dset)
    zimg = pc.loadZValImageFromDataset(dset)
    colorClassifier = pc.ColorClassifier()
    shapeClassifier = pc.ShapeClassifier()
    deleteIndices = []

    for particle in dset.particleContainer.particles:
        convertSizeAttributes(particle)
        if not pc.particleIsValid(particle):
            markForDeletion(particle)
            continue
        
        particleImg = pc.getParticleImageFromFullimage(particle.contour, fullimage)
        if particleImg.shape[0] == 0 or particleImg.shape[1] == 0:
            markForDeletion(particle)
        else:
            #compress particle contours, if possible
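            # (round-tripping the contour through a binary image and imgToCnt
            # re-extracts it in a more compact representation; contours that
            # cannot be re-extracted raise InvalidParticleError)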
            img, xmin, ymin, padding = pc.contoursToImg([particle.contour])
            del particle.contour
            try:
                particle.contour = pc.imgToCnt(img, xmin, ymin, padding)
            except InvalidParticleError:
                markForDeletion(particle)
                continue
            
            particle.color = pc.getParticleColor(particleImg, colorClassifier)
            try:
                particle.height = pc.getParticleHeight(particle.contour, zimg, dset)
                particle.shape = pc.getParticleShape(particle.contour, particle.height, shapeClassifier)
            except InvalidParticleError:
                markForDeletion(particle)
                continue
            
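            # fibres are re-measured with getFibreDimension; the result is in
            # pixels and is converted to physical units via the pixel scale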
            if particle.shape == 'fibre':
                particle.longSize, particle.shortSize = pc.getFibreDimension(particle.contour)
                particle.longSize *= dset.getPixelScale()
                particle.shortSize *= dset.getPixelScale()
            
    print(f'deleting {len(deleteIndices)} particles')
    for index in sorted(deleteIndices, reverse=True):
        dset.particleContainer.removeParticle(index)
    dset.particleContainer.resetParticleIndices()
    
def convertSizeAttributes(particle):
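    # collapse the old box/ellipse size pairs into single longSize/shortSize
    # attributes, preferring the ellipse fit whenever it is valid (not NaN)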
    if hasattr(particle, 'longSize_ellipse'):
        if not np.isnan(particle.longSize_ellipse):
            particle.longSize = particle.longSize_ellipse
            particle.shortSize = particle.shortSize_ellipse
        else:
            particle.longSize = particle.longSize_box
            particle.shortSize = particle.shortSize_box
        del particle.longSize_ellipse
        del particle.shortSize_ellipse
        del particle.longSize_box
        del particle.shortSize_box
        
def removeLegacyAttributes(dset):
    for attr in ['results', 'ramanpoints', 'particlestats', 'particlecontours', 'ramanscansortindex']:
        if hasattr(dset, attr):
            delattr(dset, attr)
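

# A minimal usage sketch (hypothetical: loadData stands for whatever routine
# deserializes a stored dataset; it is not part of this module):
#
#     dset = loadData('measurement.pkl')
#     if dset.version < currentVersion:
#         legacyConversion(dset)
#     assert dset.version == currentVersion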