#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018  Lars Bittrich and Josef Brandt, Leibniz-Institut für 
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>    

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program, see COPYING.  
If not, see <https://www.gnu.org/licenses/>.
"""
import numpy as np
import cv2
import os

from .helperfunctions import cv2imread_fix, cv2imwrite_fix
from .analysis.particleContainer import ParticleContainer
from .analysis import particleCharacterization as pc
from .errors import InvalidParticleError

# Most recent dataset format version; legacyConversion() below upgrades
# older datasets step by step until dset.version reaches this value.
currentVersion = 4

def legacyConversion(dset, recreatefullimage=False):
    """Upgrade a dataset in place from an older on-disk format to the
    current version, calling dset.save() after each completed step.

    Each ``if dset.version == n`` stage performs exactly one version bump
    (n -> n+1); because the stages are plain ``if`` (not ``elif``), a very
    old dataset falls through all stages within a single call.

    :param dset: dataset object to migrate (must expose version, save(),
        image-name helpers and the legacy scan/grid metadata)
    :param recreatefullimage: force re-stitching the full image from the
        individual grid tiles instead of reusing the legacy png
    """
    if dset.version==0:
        print("Converting legacy version 0 to 1")
        print("This may take some time")
        
        # local imports as these functions are only needed for the rare occasion of legacy conversion
        from .opticalscan import loadAndPasteImage
        
        # try to load png and check for detection contours
        recreatefullimage = recreatefullimage or not os.path.exists(dset.getLegacyImageName())
        if not recreatefullimage:
            img = cv2imread_fix(dset.getLegacyImageName())
            Nc = len(dset.particlecontours)
            if Nc>0:
                # Sample the pixels along a middle contour: if they are all
                # pure green (BGR 0,255,0), the legacy image has contour
                # overlays burned in and cannot be reused -> re-stitch.
                contour = dset.particlecontours[Nc//2]
                contpixels = img[contour[:,0,1],contour[:,0,0]]
                if np.all(contpixels[:,1]==255) and np.all(contpixels[:,2]==0) \
                    and np.all(contpixels[:,0]==0):
                    recreatefullimage = True
            if not recreatefullimage:
                cv2imwrite_fix(dset.getImageName(), img)
            del img  # release the (potentially large) image buffer early
        
        if recreatefullimage:
            print("recreating fullimage from grid data")
            imgdata = None
            zvalimg = None
            Ngrid = len(dset.grid)
            
            width, height, rotationvalue = dset.imagedim_df
            p0, p1 = dset.maxdim[:2], dset.maxdim[2:]
            # Paste every grid tile (one bmp per z-position) into the
            # accumulated full image / z-value image pair.
            for i in range(Ngrid):
                print(f"Processing image {i+1} of {Ngrid}")
                names = []
                for k in range(len(dset.zpositions)):
                    names.append(os.path.join(dset.getScanPath(), f"image_{i}_{k}.bmp"))
                p = dset.grid[i]
                imgdata, zvalimg = loadAndPasteImage(names, imgdata, zvalimg, width, 
                                                        height, rotationvalue, p0, p1, p)
            dset.zvalimg = zvalimg
            # imgdata is RGB in memory; cv2 writes BGR, hence the conversion
            cv2imwrite_fix(dset.getImageName(), cv2.cvtColor(imgdata, cv2.COLOR_RGB2BGR))
            del imgdata
        dset.saveZvalImg()
        # particle thumbnail images are no longer stored on the dataset
        if "particleimgs" in dset.__dict__:
            del dset.particleimgs
        
        dset.version = 1
        dset.save()
        
    if dset.version == 1:
        print("Converting legacy version 1 to 2")
        # Split the single pixelscale/imagedim attributes into separate
        # brightfield (_bf) and darkfield (_df) variants.
        if hasattr(dset, 'pixelscale'):
            print('pixelscale was', dset.pixelscale)
            dset.pixelscale_bf = dset.pixelscale
            dset.pixelscale_df = dset.pixelscale
            del dset.pixelscale
        
        if hasattr(dset, 'imagedim'):
            dset.imagedim_bf = dset.imagedim
            dset.imagedim_df = dset.imagedim
            del dset.imagedim
        
        # Fall back to a 1:1 particle<->spectrum mapping when the explicit
        # list is missing; assumes each scan index occurs exactly once in
        # ramanscansortindex — TODO confirm against acquisition code.
        if not hasattr(dset, 'particles2spectra'):
            dset.particles2spectra = [[int(np.where(dset.ramanscansortindex == i)[0])] for i in range(len(dset.ramanscansortindex))]
        
        dset.version = 2
        dset.save()
    
    if dset.version == 2:
        print("Converting legacy version 2 to 3")
        transferParticleStatsToParticleContainer(dset)
        dset.version = 3
        dset.save()
        
    if dset.version == 3:
        print("Converting legacy version 3 to 4")
        updateParticleStats(dset)
        removeLegacyAttributes(dset)
        dset.version = 4
        dset.save()
        
# add later conversion for higher version numbers here
        

def transferParticleStatsToParticleContainer(dset):
    """Version 2 -> 3 migration: wrap the flat particlestats /
    particlecontours arrays in a ParticleContainer, create measurement
    objects from the raman scan data, and copy over any existing polymer
    assignment results."""
    dset.particleContainer = ParticleContainer(dset)
    dset.particleContainer.initializeParticles(len(dset.particlestats))
    dset.particleContainer.setParticleContours(dset.particlecontours)
    assert len(dset.particleContainer.particles) == len(dset.particlestats)
    
    # particlestats row layout (pixel units, judging from the assignments
    # below — confirm against the legacy detection code):
    # [longSize_box, shortSize_box, longSize_ellipse, shortSize_ellipse, area]
    for index, particle in enumerate(dset.particleContainer.particles):
        particle.longSize_box = float(dset.particlestats[index][0])
        particle.shortSize_box = float(dset.particlestats[index][1])
        particle.longSize_ellipse = float(dset.particlestats[index][2])
        particle.shortSize_ellipse = float(dset.particlestats[index][3])
        particle.area = float(dset.particlestats[index][4])
    
    # convert the pixel-based sizes above into physical units
    dset.particleContainer.applyPixelScaleToParticleStats(dset.getPixelScale())
    
    dset.particleContainer.clearMeasurements()
    
    # Fall back to a 1:1 particle<->spectrum mapping when the explicit list
    # is missing; assumes each scan index occurs exactly once in
    # ramanscansortindex — TODO confirm.
    if not hasattr(dset, 'particles2spectra') or dset.particles2spectra is None:
        dset.particles2spectra = [[int(np.where(dset.ramanscansortindex == i)[0])] for i in range(len(dset.ramanscansortindex))]
    
    if len(dset.particlestats) > 0:   #i.e., particle detection was completed and particle data is there 
        dset.particleDetectionDone = True
        for particleIndex, listOfScanIndices in enumerate(dset.particles2spectra):
            curParticle = dset.particleContainer.getParticleOfIndex(particleIndex)
            curParticle.measurements = []
            for scanIndex in listOfScanIndices:
                indexOfNewMeas = dset.particleContainer.addEmptyMeasurement()
                # ramanscansortindex maps scan order -> index into ramanpoints
                indexOfMeasInRamanPoints = dset.ramanscansortindex[scanIndex]
                x, y = dset.ramanpoints[indexOfMeasInRamanPoints][0], dset.ramanpoints[indexOfMeasInRamanPoints][1]
                dset.particleContainer.setMeasurementPixelCoords(indexOfNewMeas, x, y)
                dset.particleContainer.setMeasurementScanIndex(indexOfNewMeas, scanIndex)
                curParticle.addMeasurement(dset.particleContainer.measurements[indexOfNewMeas])
  
    if hasattr(dset, 'results') and dset.results['polymers'] is not None:   #transfer results
        for particle in dset.particleContainer.particles:
            for meas in particle.measurements:
                specIndex = meas.getScanIndex()
                meas.setAssignment(dset.results['polymers'][specIndex])
                meas.setHQI(dset.results['hqis'][specIndex])
    
    dset.particleContainer.testForInconsistentParticleAssignments()
    
def updateParticleStats(dset):
    """Version 3 -> 4 migration: recompute per-particle properties
    (color, height, shape, fibre dimensions) from the stitched full image
    and z-value image, compress contours, and delete particles that turn
    out to be invalid."""
    def markForDeletion(particle):
        # deleteIndices is the list defined below in the enclosing scope
        print('index, size, assignments', particle.index, particle.getParticleSize(), particle.getParticleAssignment())
        print('particle is invalid and will be deleted!')
        deleteIndices.append(particle.index)
    
    fullimage = pc.loadFullimageFromDataset(dset)
    zimg = pc.loadZValImageFromDataset(dset)
    colorClassifier = pc.ColorClassifier()
    shapeClassifier = pc.ShapeClassifier()
    deleteIndices = []

    for particle in dset.particleContainer.particles:
        # migrate legacy box/ellipse size fields first
        convertSizeAttributes(particle)
        if not pc.particleIsValid(particle):
            markForDeletion(particle)
            continue
        
        particleImg = pc.getParticleImageFromFullimage(particle.contour, fullimage)
        if particleImg.shape[0] == 0 or particleImg.shape[1] == 0:
            # empty crop: contour lies outside the full image -> unusable
            markForDeletion(particle)
        else:
            #compress particle contours, if possible
            img, xmin, ymin, padding = pc.contoursToImg([particle.contour])
            del particle.contour
            try:
                particle.contour = pc.imgToCnt(img, xmin, ymin, padding)
            except InvalidParticleError:
                markForDeletion(particle)
                continue
            
            particle.color = pc.getParticleColor(particleImg, colorClassifier)
            try:
                particle.height = pc.getParticleHeight(particle.contour, zimg, dset)
                particle.shape = pc.getParticleShape(particle.contour, particle.height, shapeClassifier)
            except InvalidParticleError:
                markForDeletion(particle)
                continue
            
            # Fibres get their sizes from the contour geometry instead of
            # the ellipse fit; getFibreDimension returns pixel units, so
            # rescale with the dataset's pixel scale.
            if particle.shape == 'fibre':
                particle.longSize, particle.shortSize = pc.getFibreDimension(particle.contour)
                particle.longSize *= dset.getPixelScale()
                particle.shortSize *= dset.getPixelScale()
            
    print(f'deleting {len(deleteIndices)} particles')
    # remove from highest index down so lower indices stay valid
    for index in sorted(deleteIndices, reverse=True):
        dset.particleContainer.removeParticle(index)
    dset.particleContainer.resetParticleIndices()
    
def convertSizeAttributes(particle):
    """Migrate legacy per-particle size fields to the unified
    longSize/shortSize attributes.

    Prefers the ellipse-fit sizes when they are valid numbers, otherwise
    falls back to the bounding-box sizes; the four legacy attributes are
    removed afterwards. Particles without legacy fields are left untouched.
    """
    if not hasattr(particle, 'longSize_ellipse'):
        return
    ellipseIsValid = not np.isnan(particle.longSize_ellipse)
    if ellipseIsValid:
        particle.longSize = particle.longSize_ellipse
        particle.shortSize = particle.shortSize_ellipse
    else:
        particle.longSize = particle.longSize_box
        particle.shortSize = particle.shortSize_box
    for legacyAttr in ('longSize_ellipse', 'shortSize_ellipse',
                       'longSize_box', 'shortSize_box'):
        delattr(particle, legacyAttr)
        
def removeLegacyAttributes(dset):
    """Delete dataset attributes superseded by the particleContainer
    (format version >= 3) so they are not pickled into the saved dataset.

    Bug fix: the original body executed ``del attr``, which only unbinds
    the local loop variable — the dataset attribute itself was never
    removed. ``delattr(dset, attr)`` actually drops the legacy data.
    """
    for attr in ['results', 'ramanpoints', 'particlestats',
                 'particlecontours', 'ramanscansortindex']:
        if hasattr(dset, attr):
            delattr(dset, attr)