...
 
Commits (5)
This diff is collapsed.
# -*- coding: utf-8 -*-
"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018 Lars Bittrich and Josef Brandt, Leibniz-Institut für
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program, see COPYING.
If not, see <https://www.gnu.org/licenses/>.
"""
from PyQt5 import QtWidgets
import numpy as np
import pandas as pd
import os
import sys
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
class ExpExcelDialog(QtWidgets.QDialog):
    """Dialog that lets the user pick export columns and write the particle
    list (plus an optional size-class statistics sheet) to an .xlsx file."""

    def __init__(self, datastats, parent):
        """Build the export-options UI.

        datastats: DataStats instance holding particle statistics and results.
        parent: parent QWidget for the dialog.
        """
        super(ExpExcelDialog, self).__init__(parent)
        self.setWindowTitle('Export Options')
        self.setGeometry(200, 200, 300, 300)

        self.datastats = datastats
        self.particles = self.datastats.particlestats
        self.polymers = self.datastats.particleResults
        self.additives = self.datastats.currentAdditives
        self.hqis = self.datastats.hqis

        self.layout = QtWidgets.QHBoxLayout()
        self.setLayout(self.layout)

        excelvbox = QtWidgets.QVBoxLayout()
        excelvbox.addWidget(QtWidgets.QLabel('Select Parameters for Export'))
        excelgroup = QtWidgets.QGroupBox("Export to Excel", self)
        self.exportOptions = ['Polymer Type (mandatory)', 'Additives',
                              'Long Size (µm)', 'Short Size (µm)',
                              'Area (µm²)', 'HQI', 'Size Classes']
        self.checkBoxes = []
        # upper limits of the size classes in µm (1e6 acts as "anything larger")
        self.sizeClasses = [5, 10, 20, 50, 100, 1e6]
        self.directory = self.datastats.dataset.path

        for option in self.exportOptions:
            self.checkBoxes.append(QtWidgets.QCheckBox(self))
            self.checkBoxes[-1].setText(option)
            self.checkBoxes[-1].setChecked(True)
            if option == 'Polymer Type (mandatory)':
                self.checkBoxes[-1].setEnabled(False)  # is mandatory!!!
            if option == 'Additives':
                if self.additives is None:
                    # no additive data present -> cannot be exported
                    self.checkBoxes[-1].setEnabled(False)
                    self.checkBoxes[-1].setChecked(False)
            excelvbox.addWidget(self.checkBoxes[-1])

        self.xlsFileName = QtWidgets.QLineEdit()
        self.xlsFileName.setText('{}_Particle_List'.format(self.datastats.dataset.name))
        excelvbox.addWidget(QtWidgets.QLabel('Filename:'))
        excelvbox.addWidget(self.xlsFileName)

        self.exlbtn = QtWidgets.QPushButton('Export to Excel')
        self.exlbtn.resize(self.exlbtn.sizeHint())
        self.exlbtn.clicked.connect(self.toExcel)
        excelvbox.addWidget(self.exlbtn)
        excelgroup.setLayout(excelvbox)
        self.layout.addWidget(excelgroup)
        self.show()

    def toExcel(self):
        """Assemble the selected columns and write them to an Excel file.

        Creates an 'Individual Particles' sheet and, when size classes are
        selected, an additional 'Particle Statistics' sheet with per-polymer
        size-class counts. Closes the dialog on success.
        """
        requiredcolumns = []
        # long size: entry 2 holds the recomputed size of merged particles
        # (NaN when untouched), entry 0 the original long size
        self.sizes = np.round(np.array([i[0] if np.isnan(i[2]) else i[2]
                                        for i in self.particles]), 1)
        for box in self.checkBoxes:
            if not box.isChecked():
                continue
            if box.text() != 'Size Classes':
                requiredcolumns.append(box.text())
                if box.text() == 'Long Size (µm)':
                    longSize = self.sizes
                elif box.text() == 'Short Size (µm)':
                    # entry 3: recomputed short size of merged particles
                    shortSize = np.round(np.array([i[1] if np.isnan(i[3]) else i[3]
                                                   for i in self.particles]), 1)
                elif box.text() == 'Area (µm²)':
                    area = np.array([np.round(float(entry[4]), 1) for entry in self.particles])
            else:
                requiredcolumns.append('0 - 5 µm')
                requiredcolumns.append('5 - 10 µm')
                requiredcolumns.append('10 - 20 µm')
                requiredcolumns.append('20 - 50 µm')
                requiredcolumns.append('50 - 100 µm')
                requiredcolumns.append('> 100 µm')

        # polymer type goes into a separate text column, hence the "-1"
        finalData = np.zeros((self.polymers.shape[0], len(requiredcolumns) - 1))
        polymertypes = [""] * self.polymers.shape[0]
        rowindex = 0

        for polymer in np.unique(self.polymers):
            indices = self.polymers == polymer
            numentries = int(np.sum(indices))
            print("Num:", numentries)
            sys.stdout.flush()
            for colindex, column in enumerate(requiredcolumns):
                if column == 'Polymer Type (mandatory)':
                    polymertypes[rowindex:rowindex+numentries] = self.polymers[indices]
                if column == 'Additives':
                    finalData[rowindex:rowindex+numentries, colindex-1] = self.additives[indices]
                if column == 'Long Size (µm)':
                    finalData[rowindex:rowindex+numentries, colindex-1] = longSize[indices]
                if column == 'Short Size (µm)':
                    finalData[rowindex:rowindex+numentries, colindex-1] = shortSize[indices]
                if column == 'Area (µm²)':
                    finalData[rowindex:rowindex+numentries, colindex-1] = area[indices]
                # hit quality index array does not match the data size if particles have been combined
                # if column == 'HQI':
                #     finalData[rowindex:rowindex+numentries, colindex-1] = self.hqis[indices]

            if '> 100 µm' in requiredcolumns:
                # append one-hot size-class columns after the regular columns
                numPrevCols = len(requiredcolumns) - 1 - len(self.sizeClasses)
                for tableindex, dataindex in enumerate(np.arange(len(indices))[indices]):
                    curSize = self.sizes[dataindex]
                    for classindex, upLimit in enumerate(self.sizeClasses):
                        lowLimit = 0 if classindex == 0 else self.sizeClasses[classindex - 1]
                        # plain ints instead of np.int, which was removed in numpy 1.24
                        inClass = 1 if (lowLimit < curSize <= upLimit) else 0
                        finalData[rowindex+tableindex, numPrevCols + classindex] = inClass
            rowindex = rowindex + numentries

        # dump into excel file; os.path.join fixes the missing path separator
        # the previous string concatenation had in the duplicate-name fallback
        xlsname = os.path.join(self.directory, self.xlsFileName.text() + '.xlsx')
        print('exporting excel to:\n file name: {} in directory: {}'.format(self.xlsFileName.text(), self.directory))
        incr = 1
        while os.path.exists(xlsname):
            # never overwrite an existing export; append an incrementing counter
            xlsname = os.path.join(self.directory,
                                   '{} {}.xlsx'.format(self.xlsFileName.text(), incr))
            incr += 1

        writer = pd.ExcelWriter(xlsname, engine='xlsxwriter')
        df = pd.DataFrame(finalData, columns=requiredcolumns[1:])
        df.insert(0, 'Polymer Type', polymertypes)
        df.to_excel(writer, sheet_name='Individual Particles', index=False)

        if '> 100 µm' in requiredcolumns:
            # generate particle statistics report (counts per size class)
            header = ['0 - 5 µm', '5 - 10 µm', '10 - 20 µm',
                      '20 - 50 µm', '50 - 100 µm', '> 100 µm']
            index = np.unique(self.polymers)
            particleclasses = []
            for polymer in index:
                indices = np.where(self.polymers == polymer)[0]
                # bin particle sizes into the fixed size classes
                sortind = np.searchsorted([5, 10, 20, 50, 100], self.sizes[indices], 'right')
                particleclasses.append(np.bincount(sortind, minlength=6))
            particleclasses = np.array(particleclasses)
            report = pd.DataFrame(particleclasses, columns=header, dtype=int)
            report.insert(0, 'Polymer Type', index)
            report.insert(len(report.columns), 'Sum total', particleclasses.sum(axis=1))
            report.to_excel(writer, sheet_name='Particle Statistics', index=False)

        # close() also writes the file; ExcelWriter.save() was removed in pandas 2.0
        writer.close()
        self.accept()
class AdditiveViewer(QtWidgets.QWidget):
    """Stand-alone window showing a histogram of the additives that were
    found for one polymer type."""

    def __init__(self, polymername, sortedAdditives):
        """Create the window and draw the histogram of *sortedAdditives*."""
        super(AdditiveViewer, self).__init__()
        self.setGeometry(200, 200, 800, 600)
        self.setWindowTitle(f'Additives of {polymername}')

        self.layout = QtWidgets.QGridLayout()
        self.setLayout(self.layout)

        # embed a matplotlib canvas that holds the histogram axes
        self.canvas = FigureCanvas(Figure(figsize=(5, 3)))
        self.ax = self.canvas.figure.subplots()
        self.layout.addWidget(self.canvas, 0, 0)

        self.ax.hist(sortedAdditives)
        self.ax.set_ylabel('Number', fontsize=15)
        self.ax.tick_params(axis='both', which='both', labelsize=15)
\ No newline at end of file
# -*- coding: utf-8 -*-
"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018 Lars Bittrich and Josef Brandt, Leibniz-Institut für
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program, see COPYING.
If not, see <https://www.gnu.org/licenses/>.
"""
import os
import numpy as np
import operator
class DataStats(object):
    """Data container for the spectra/particle analysis of one dataset.

    Holds the raw library-search results (polymer and additive assignments
    with their hit quality indices), the per-particle assignments derived
    from them, and helpers to load spectra/particle data and to build the
    histogram data used for display and export.
    """

    def __init__(self, dataset):
        self.dataset = dataset
        self.config = dataset.resultParams
        self.spectraResults = None    # entire list of all spectra assignments
        self.additiveResults = None   # entire list of all additives
        self.particlestats = None
        self.particleResults = None   # final assignment for each particle
        self.currentPolymers = None   # polymers after setting entries with low hqi to unknown
        self.currentAdditives = None  # same thing for the additives
        self.uniquePolymers = None    # list of present polymer types
        self.spectra = None           # acquired spectra
        self.indices = None           # assignment of what spectra-indices belong to what substance
        self.particles2spectra = None
        self.manualPolymers = {}
        self.manualAdditives = {}

    def resetResults(self, spectraResults, additiveResults, hqis, addhqis):
        """Overwrite the raw results, e.g. after (re)importing them."""
        self.spectraResults = spectraResults
        self.additiveResults = additiveResults
        self.hqis = hqis
        self.addhqis = addhqis

    def update(self):
        """Pull the results stored in the dataset and (re)load the spectra.

        Returns the spectra array from loadSpectra, or None on failure.
        """
        print('updating data from', self.dataset.name)
        self.spectraResults = self.dataset.results['polymers']
        self.additiveResults = self.dataset.results['additives']
        self.hqis = self.dataset.results['hqis']
        self.addhqis = self.dataset.results['additive_hqis']

        self.colorSeed = self.dataset.colorSeed
        if not isinstance(self.colorSeed, str):
            self.colorSeed = 'default'

        # load spectra from the default export location unless a path is set
        if self.dataset.spectraPath is None:
            fname = os.path.join(self.dataset.path, self.dataset.name + '_000_Spec.Data 1.txt')
        else:
            fname = self.dataset.spectraPath
        return self.loadSpectra(fname)

    def loadSpectra(self, fname):
        """Load spectra from a cached .npy file or the text export *fname*.

        Renishaw text exports are converted to the WITec layout (first
        column wavenumbers, remaining columns intensities) and cached as a
        binary .npy next to the dataset for fast reloading.
        Returns the spectra array, or None if loading failed.
        """
        import time
        t0 = time.time()
        specfilename = self.dataset.fname.split('.pkl')[0] + '_spectra.npy'
        specs = None
        if os.path.exists(specfilename):
            specs = np.load(specfilename)
        else:
            try:
                specs = np.loadtxt(fname)
                # if spectra are already in correct format (WITec, first column:
                # wavenumbers, other columns: intensities) we take them,
                # otherwise we have to convert from Renishaw export format...
                if not len(np.unique(specs[:, 0])) == len(specs[:, 0]):
                    # Renishaw convert
                    # columns 0 and 1 are x and y coordinates. We dont need them...
                    startWavenumber = specs[0, 2]
                    startIndices = np.where(specs[:, 2] == startWavenumber)[0]
                    # shape (numWavenumbers, numSpectra+1); first column holds wavenumbers
                    spectra = np.zeros((startIndices[1], len(startIndices) + 1))
                    spectra[:, 0] = specs[startIndices[0]:startIndices[1], 2]
                    for i in range(len(startIndices) - 1):
                        spectra[:, i+1] = specs[startIndices[i]:startIndices[i+1], 3]
                    # aaand the last spectrum:
                    spectra[:, -1] = specs[startIndices[-1]:, 3]
                    # Renishaw goes from highest to lowest wavenumber
                    specs = np.flip(spectra, 0)
                # write spectra to binary file, that makes reloading them in future significantly faster
                np.save(specfilename, specs)
                print('loading specs:', time.time() - t0)
                self.dataset.spectraPath = fname
            except Exception:
                # best effort: a missing/corrupt file leaves specs as None;
                # callers check the return value (was a bare except before)
                pass
        self.spectra = specs
        return specs

    def loadParticleData(self):
        """Load particle statistics from the dataset and convert them to µm.

        Returns True if the dataset already contains results matching the
        number of scan points, False if they still have to be created.
        """
        self.particlestats = np.array(self.dataset.particlestats)
        pixelscale = (self.dataset.pixelscale_df if self.dataset.imagescanMode == 'df'
                      else self.dataset.pixelscale_bf)
        # convert to mikrometer scale; the area (subindex 4) scales with the
        # square of the pixelscale, hence it is multiplied a second time
        for index in range(len(self.particlestats)):
            for subindex in range(5):
                self.particlestats[index][subindex] = self.particlestats[index][subindex] * pixelscale
                if subindex == 4:
                    self.particlestats[index][subindex] = self.particlestats[index][subindex] * pixelscale

        self.particles2spectra = self.dataset.particles2spectra
        sortindices = self.dataset.ramanscansortindex
        if self.particles2spectra is None:
            print('creating default particles2spectra list')
            # no assignment found, so we assume one measurement per particle
            # and use ramanscansortindex for assignment
            self.particles2spectra = [[int(np.where(sortindices == i)[0])]
                                      for i in range(len(sortindices))]

        # check, if dataset already contains results. Otherwise load them...
        return not (self.spectraResults is None or
                    (len(self.spectraResults) != len(sortindices)))

    def invalidateSpectra(self):
        """Reset all spectra assignments to 'empty' with full hit quality."""
        self.spectraResults = ['empty'] * (self.spectra.shape[1] - 1)
        self.hqis = [100] * (self.spectra.shape[1] - 1)

    def formatResults(self, hqi, compHqi):
        """Convert raw result lists to arrays and apply the HQI thresholds.

        hqi: minimum hit quality index for polymer assignments.
        compHqi: minimum hit quality index for additive (component) assignments.
        Entries below the respective threshold are set to 'unknown'.
        """
        if self.spectraResults is not None:
            del self.currentPolymers, self.currentAdditives
            # convert to arrays (makes indexing easier...)
            self.currentPolymers, self.hqis = np.array(self.spectraResults), np.array(self.hqis)
            if self.additiveResults is not None:
                self.currentAdditives, self.addhqis = np.array(self.additiveResults), np.array(self.addhqis)
                # NOTE(review): compHqiSpinBox is a GUI widget and normally not
                # an attribute of DataStats (leftover from before the GUI/data
                # split); guard it so additive datasets don't crash here
                if hasattr(self, 'compHqiSpinBox'):
                    self.compHqiSpinBox.setDisabled(False)
            else:
                self.currentAdditives = None

            # set poor HQI results to unknown
            self.currentPolymers[self.hqis < hqi] = 'unknown'
            if self.currentAdditives is not None:
                self.currentAdditives[self.addhqis < compHqi] = 'unknown'

    def createHistogramData(self):
        """Derive per-particle assignments and the polymer type histogram.

        Returns False if the particle and spectra bookkeeping are
        inconsistent in length, True on success.
        """
        self.uniquePolymers = np.unique(self.currentPolymers)
        self.particleResults = [None] * len(self.particlestats)
        self.typehistogram = {i: 0 for i in self.uniquePolymers}
        if len(self.particles2spectra) != len(self.particlestats):
            return False

        for particleID, specList in enumerate(self.particles2spectra):
            # we take the first result as particle result. Hence, all spectra
            # per particle have to have the same result
            assignment = self.currentPolymers[specList[0]]
            self.particleResults[particleID] = assignment
            self.typehistogram[assignment] += 1
        self.particleResults = np.array(self.particleResults)

        # sort typehistogram by count; it will be converted into a list!!
        self.typehistogram = sorted(self.typehistogram.items(),
                                    key=operator.itemgetter(1), reverse=True)
        self.uniquePolymers = [i[0] for i in self.typehistogram]

        self.indices = []  # what particles belong to which polymer type?
        for polymer in self.uniquePolymers:
            self.indices.append(list(np.where(self.particleResults == polymer)[0]))

        # generate additive array for each type in typehistogram:
        if self.currentAdditives is None:
            self.sorted_additives = None
        else:
            self.sorted_additives = []
            for polymer in self.typehistogram:  # get additives of each polymer type
                self.sorted_additives.append(
                    self.currentAdditives[np.where(self.currentPolymers == polymer[0])])
            for i in range(len(self.sorted_additives)):  # sort out 'none' entries
                nonentries = np.where(self.sorted_additives[i] == 'none')
                self.sorted_additives[i] = np.delete(self.sorted_additives[i], nonentries)
        return True

    def saveAnalysisResults(self, minHQI, compHQI):
        """Persist the raw results and the HQI thresholds into the dataset."""
        self.dataset.results = {'polymers': self.spectraResults,
                                'hqis': self.hqis,
                                'additives': self.additiveResults,
                                'additive_hqis': self.addhqis}
        self.dataset.resultParams = {'minHQI': minHQI,
                                     'compHQI': compHQI}
        self.dataset.save()
        print('saved dataset')
\ No newline at end of file
...@@ -12,13 +12,14 @@ import sys ...@@ -12,13 +12,14 @@ import sys
from os import chdir, getcwd from os import chdir, getcwd
class LoadWITecResults(QtWidgets.QDialog): class LoadWITecResults(QtWidgets.QDialog):
def __init__(self, parent): def __init__(self, datastats, parent):
super(LoadWITecResults, self).__init__() super(LoadWITecResults, self).__init__()
self.setGeometry(400, 400, 200, 300) self.setGeometry(400, 400, 200, 300)
self.setWindowTitle('Get Truematch Results') self.setWindowTitle('Get Truematch Results')
self.layout = QtWidgets.QGridLayout() self.layout = QtWidgets.QGridLayout()
self.setLayout(self.layout) self.setLayout(self.layout)
self.datastats = datastats
self.parent = parent self.parent = parent
self.parent.setDisabled(True) self.parent.setDisabled(True)
self.trueMatchResults = None self.trueMatchResults = None
...@@ -99,7 +100,7 @@ class LoadWITecResults(QtWidgets.QDialog): ...@@ -99,7 +100,7 @@ class LoadWITecResults(QtWidgets.QDialog):
self.editEntryWindow.show() self.editEntryWindow.show()
def loadFileManually(self): def loadFileManually(self):
dsetpath = self.parent.parent.dataset.path dsetpath = self.datastats.dataset.path
fnames =QtWidgets.QFileDialog.getOpenFileNames(self, 'Select TrueMatch result file', dsetpath, 'text file (*.txt)')[0] fnames =QtWidgets.QFileDialog.getOpenFileNames(self, 'Select TrueMatch result file', dsetpath, 'text file (*.txt)')[0]
if len(fnames) > 1: if len(fnames) > 1:
QtWidgets.QMessageBox.about(self, 'Info', 'The following order of files was loaded. If incorrect, please call a coder!\n{}'.format('\n'.join([fname for fname in fnames]))) QtWidgets.QMessageBox.about(self, 'Info', 'The following order of files was loaded. If incorrect, please call a coder!\n{}'.format('\n'.join([fname for fname in fnames])))
...@@ -247,22 +248,16 @@ class LoadWITecResults(QtWidgets.QDialog): ...@@ -247,22 +248,16 @@ class LoadWITecResults(QtWidgets.QDialog):
assert len(self.polymertypes) == len(self.resultList), 'incorrect number of polymer types added...' assert len(self.polymertypes) == len(self.resultList), 'incorrect number of polymer types added...'
del self.parent.spectraResults, self.parent.additiveResults, self.parent.hqis, self.parent.addhqis self.datastats.resetResults(self.polymertypes, self.additives,
self.parent.spectraResults = self.polymertypes self.hqis, self.addhqis)
self.parent.additiveResults = self.additives
self.parent.hqis = self.hqis
self.parent.addhqis = self.addhqis
self.parent.formatResults() self.parent.formatResults()
if len(self.manualPolymers) > 0: if len(self.manualPolymers) > 0:
self.reviewGroup.setDisabled(False) self.reviewGroup.setDisabled(False)
def closeEvent(self, event): def closeEvent(self, event):
del self.parent.spectraResults, self.parent.additiveResults, self.parent.hqis, self.parent.addhqis self.datastats.resetResults(self.polymertypes, self.additives,
self.parent.spectraResults = self.polymertypes self.hqis, self.addhqis)
self.parent.additiveResults = self.additives
self.parent.hqis = self.hqis
self.parent.addhqis = self.addhqis
self.parent.updateBtn.clicked.connect(self.parent.formatResults) self.parent.updateBtn.clicked.connect(self.parent.formatResults)
self.parent.formatResults() self.parent.formatResults()
self.parent.show_hide_labels() self.parent.show_hide_labels()
......
...@@ -27,10 +27,10 @@ If not, see <https://www.gnu.org/licenses/>. ...@@ -27,10 +27,10 @@ If not, see <https://www.gnu.org/licenses/>.
import numpy as np import numpy as np
import cv2 import cv2
from PyQt5 import QtWidgets from PyQt5 import QtWidgets
#import matplotlib.pyplot as plt
class ParticleEditor(object): class ParticleEditor(object):
def __init__(self, parent): def __init__(self, datastats, parent):
self.datastats = datastats
self.parent = parent #the assigned analysis widget self.parent = parent #the assigned analysis widget
self.backupFreq = 3 #save a backup every n actions self.backupFreq = 3 #save a backup every n actions
self.neverBackedUp = True self.neverBackedUp = True
...@@ -39,7 +39,7 @@ class ParticleEditor(object): ...@@ -39,7 +39,7 @@ class ParticleEditor(object):
def createSafetyBackup(self): def createSafetyBackup(self):
self.actionCounter += 1 self.actionCounter += 1
if self.actionCounter == self.backupFreq-1 or self.neverBackedUp: if self.actionCounter == self.backupFreq-1 or self.neverBackedUp:
backupname = self.parent.parent.dataset.saveBackup() backupname = self.datastats.dataset.saveBackup()
print('backing up as', backupname) print('backing up as', backupname)
self.neverBackedUp = False self.neverBackedUp = False
self.actionCounter = 0 self.actionCounter = 0
...@@ -52,12 +52,14 @@ class ParticleEditor(object): ...@@ -52,12 +52,14 @@ class ParticleEditor(object):
def combineParticles(self, contourIndices, new_assignment): def combineParticles(self, contourIndices, new_assignment):
if new_assignment == 'other': if new_assignment == 'other':
new_assignment = self.getNewEntry() new_assignment = self.getNewEntry()
if new_assignment is None:
return
contourIndices = sorted(contourIndices) #we want to keep the contour with lowest index contourIndices = sorted(contourIndices) #we want to keep the contour with lowest index
print('merging contours:', contourIndices) print('merging contours:', contourIndices)
self.createSafetyBackup() self.createSafetyBackup()
#get contours: #get contours:
contours = [self.parent.parent.dataset.particlecontours[i] for i in contourIndices] contours = [self.datastats.dataset.particlecontours[i] for i in contourIndices]
cnt = np.vstack(tuple(contours)) #combine contous cnt = np.vstack(tuple(contours)) #combine contous
#draw contours #draw contours
...@@ -70,7 +72,7 @@ class ParticleEditor(object): ...@@ -70,7 +72,7 @@ class ParticleEditor(object):
img = np.zeros((rangey, rangex)) img = np.zeros((rangey, rangex))
for i in contourIndices: for i in contourIndices:
curCnt = self.parent.parent.dataset.particlecontours[i] curCnt = self.datastats.dataset.particlecontours[i]
for i in range(len(curCnt)): for i in range(len(curCnt)):
curCnt[i][0][0] -= xmin-padding curCnt[i][0][0] -= xmin-padding
curCnt[i][0][1] -= ymin-padding curCnt[i][0][1] -= ymin-padding
...@@ -80,7 +82,10 @@ class ParticleEditor(object): ...@@ -80,7 +82,10 @@ class ParticleEditor(object):
img = np.uint8(cv2.morphologyEx(img, cv2.MORPH_CLOSE, np.ones((3, 3)))) img = np.uint8(cv2.morphologyEx(img, cv2.MORPH_CLOSE, np.ones((3, 3))))
temp, contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE) if cv2.__version__ > '3.5':
contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
else:
temp, contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
newContour = contours[0] newContour = contours[0]
stats = self.characterizeParticle(newContour) stats = self.characterizeParticle(newContour)
...@@ -91,65 +96,67 @@ class ParticleEditor(object): ...@@ -91,65 +96,67 @@ class ParticleEditor(object):
#check, if dataset contains (already modified) particle2spectra, otherwise create new. #check, if dataset contains (already modified) particle2spectra, otherwise create new.
if self.parent.parent.dataset.particles2spectra is None: #create default assignment if self.datastats.dataset.particles2spectra is None: #create default assignment
print('recreating particles2spectra from within edit particles...') print('recreating particles2spectra from within edit particles...')
sortindices = self.parent.parent.dataset.ramanscansortindex sortindices = self.datastats.dataset.ramanscansortindex
self.parent.parent.dataset.particles2spectra = [[int(np.where(sortindices == i)[0])] for i in range(len(sortindices))] self.datastats.dataset.particles2spectra = [[int(np.where(sortindices == i)[0])] for i in range(len(sortindices))]
#Contour indices are the same as the original particlestats, which are contained in the dataset. #Contour indices are the same as the original particlestats, which are contained in the dataset.
#We have to modify that and reload in the analysisview #We have to modify that and reload in the analysisview
#first, overwrite first index with new particlestats #first, overwrite first index with new particlestats
self.parent.parent.dataset.particlestats[contourIndices[0]] = stats self.datastats.dataset.particlestats[contourIndices[0]] = stats
#now, delete the rest... #now, delete the rest...
self.parent.parent.dataset.particlestats = [i for ind, i in enumerate(self.parent.parent.dataset.particlestats) if ind not in contourIndices[1:]] self.datastats.dataset.particlestats = [i for ind, i in enumerate(self.datastats.dataset.particlestats) if ind not in contourIndices[1:]]
#same with the contours #same with the contours
self.parent.parent.dataset.particlecontours[contourIndices[0]] = newContour self.datastats.dataset.particlecontours[contourIndices[0]] = newContour
self.parent.parent.dataset.particlecontours = [i for ind, i in enumerate(self.parent.parent.dataset.particlecontours) if ind not in contourIndices[1:]] self.datastats.dataset.particlecontours = [i for ind, i in enumerate(self.datastats.dataset.particlecontours) if ind not in contourIndices[1:]]
#update particle2spectra_list #update particle2spectra_list
#what is the current particle index?? #what is the current particle index??
specIndices = [] specIndices = []
#other spectra indices: #other spectra indices:
for index in contourIndices: for index in contourIndices:
specIndices.append(self.parent.particles2spectra[index]) specIndices.append(self.datastats.particles2spectra[index])
#flatten index list (in case, that a nested list was created...) #flatten index list (in case, that a nested list was created...)
specIndices = list(np.concatenate(specIndices)) specIndices = list(np.concatenate(specIndices))
for i in specIndices: for i in specIndices:
self.parent.spectraResults[i] = new_assignment self.datastats.spectraResults[i] = new_assignment
self.parent.hqis[i] = 100 #avoid sorting them out again by hqi-filter... self.datastats.hqis[i] = 100 #avoid sorting them out again by hqi-filter...
print(f'spectrum {i} of particle{contourIndices[0]} is now {new_assignment}') print(f'spectrum {i} of particle{contourIndices[0]} is now {new_assignment}')
#modify particles2spectra.. #modify particles2spectra..
self.parent.parent.dataset.particles2spectra[contourIndices[0]] = specIndices self.datastats.dataset.particles2spectra[contourIndices[0]] = specIndices
for index in reversed(contourIndices[1:]): for index in reversed(contourIndices[1:]):
print('removing index from particles2spectra:', index) print('removing index from particles2spectra:', index)
del self.parent.parent.dataset.particles2spectra[index] del self.datastats.dataset.particles2spectra[index]
#save data #save data
self.parent.saveAnalysisResults() self.datastats.saveAnalysisResults()
#update contours in sampleview #update contours in sampleview
self.parent.parent.contouritem.resetContours(self.parent.parent.dataset.particlecontours) self.parent.parent.contouritem.resetContours(self.datastats.dataset.particlecontours)
self.parent.loadParticleData() self.parent.loadParticleData()
def reassignParticles(self, contourindices, new_assignment): def reassignParticles(self, contourindices, new_assignment):
if new_assignment == 'other': if new_assignment == 'other':
new_assignment = self.getNewEntry() new_assignment = self.getNewEntry()
if new_assignment is None:
return
self.createSafetyBackup() self.createSafetyBackup()
print(f'reassigning indices {contourindices} into {new_assignment}') print(f'reassigning indices {contourindices} into {new_assignment}')
for partIndex in contourindices: for partIndex in contourindices:
for specIndex in self.parent.particles2spectra[partIndex]: for specIndex in self.datastats.particles2spectra[partIndex]:
self.parent.currentPolymers[specIndex] = new_assignment self.datastats.currentPolymers[specIndex] = new_assignment
self.parent.spectraResults[specIndex] = new_assignment self.datastats.spectraResults[specIndex] = new_assignment
self.parent.hqis[specIndex] = 100 self.datastats.hqis[specIndex] = 100
#save data #save data
self.parent.saveAnalysisResults() self.datastats.saveAnalysisResults()
self.parent.loadParticleData() self.parent.loadParticleData()
......
This diff is collapsed.
...@@ -24,7 +24,7 @@ import numpy as np ...@@ -24,7 +24,7 @@ import numpy as np
WX, WY = 1024, 200 WX, WY = 1024, 200
class Legend(QtWidgets.QMdiSubWindow): class ColorLegend(QtWidgets.QMdiSubWindow):
def __init__(self, parent=None): def __init__(self, parent=None):
super().__init__(parent) super().__init__(parent)
...@@ -38,6 +38,14 @@ class Legend(QtWidgets.QMdiSubWindow): ...@@ -38,6 +38,14 @@ class Legend(QtWidgets.QMdiSubWindow):
self.fontSize = 15 self.fontSize = 15
self.spacer = 10 self.spacer = 10
def setTextColorItems(self, items):
for text, color in items:
print(text, color)
assert type(text)==str or type(text)==np.str_, "items must be tuples of text and QColor"
assert type(color)==QtGui.QColor or type(color)==QtCore.Qt.GlobalColor, "items must be tuples of text and QColor"
self.items = items
self.update()
def mousePressEvent(self, event): def mousePressEvent(self, event):
if event.button()==QtCore.Qt.LeftButton: if event.button()==QtCore.Qt.LeftButton:
self.drag = event.pos() self.drag = event.pos()
......
...@@ -329,6 +329,3 @@ class DataSet(object): ...@@ -329,6 +329,3 @@ class DataSet(object):
return filename return filename
# backupNameNotFound = False # backupNameNotFound = False
if __name__ == '__main__':
dset = loadData(r'D:\Projekte\Mikroplastik\Microcatch_BALT\Sampling Kampagne 1\MCI_2\MCI_2_all_kleiner500\MCI_2_ds1+2_all_kleiner500_10_1\MCI_2_ds1+2_all_kleiner500_10_1.pkl')
...@@ -23,23 +23,22 @@ from sampleview import SampleView ...@@ -23,23 +23,22 @@ from sampleview import SampleView
from scalebar import ScaleBar from scalebar import ScaleBar
from ramancom.ramancontrol import defaultPath from ramancom.ramancontrol import defaultPath
from ramancom.ramanSwitch import RamanSwitch from ramancom.ramanSwitch import RamanSwitch
from analysis.analysisWidgets import Legend from colorlegend import ColorLegend
import os import os
from pathlib import Path
class MeasureParticleWindow(QtWidgets.QMainWindow): class MeasureParticleWindow(QtWidgets.QMainWindow):
def __init__(self): def __init__(self, logpath):
super(MeasureParticleWindow, self).__init__() super(MeasureParticleWindow, self).__init__()
self.setWindowTitle("GEPARD") self.setWindowTitle("GEPARD")
self.resize(900, 700) self.resize(900, 700)
self.view = SampleView() self.view = SampleView(logpath)
self.view.imparent = self self.view.imparent = self
self.view.ScalingChanged.connect(self.scalingChanged) self.view.ScalingChanged.connect(self.scalingChanged)
self.scalebar = ScaleBar(self) self.scalebar = ScaleBar(self)
self.legend = Legend(self) self.legend = ColorLegend(self)
self.ramanSwitch = RamanSwitch(self) self.ramanSwitch = RamanSwitch(self)
self.view.ScalingChanged.connect(self.scalebar.updateScale) self.view.ScalingChanged.connect(self.scalebar.updateScale)
...@@ -288,21 +287,26 @@ class MeasureParticleWindow(QtWidgets.QMainWindow): ...@@ -288,21 +287,26 @@ class MeasureParticleWindow(QtWidgets.QMainWindow):
self.addToolBar(QtCore.Qt.LeftToolBarArea, self.toolbar) self.addToolBar(QtCore.Qt.LeftToolBarArea, self.toolbar)
if __name__ == '__main__': if __name__ == '__main__':
import sys import sys
from time import localtime, strftime from time import localtime, strftime
logpath = os.path.join(Path.home(),'gepard')
if not os.path.exists(logpath):
os.mkdir(logpath)
logname = os.path.join(logpath, 'logfile.txt')
fp = open(logname, "a")
sys.stderr = fp
sys.stdout = fp
print("starting GEPARD at: " + strftime("%d %b %Y %H:%M:%S", localtime()), flush=True)
sys.stdout.flush()
app = QtWidgets.QApplication(sys.argv) app = QtWidgets.QApplication(sys.argv)
meas = MeasureParticleWindow() app.setApplicationName("GEPARD") # appname needed for logpath
logpath = QtCore.QStandardPaths.writableLocation(
QtCore.QStandardPaths.AppLocalDataLocation)
if logpath != "":
if not os.path.exists(logpath):
os.mkdir(logpath)
logname = os.path.join(logpath, 'logfile.txt')
fp = open(logname, "a")
sys.stderr = fp
sys.stdout = fp
print("starting GEPARD at: " + strftime("%d %b %Y %H:%M:%S", localtime()),
flush=True)
meas = MeasureParticleWindow(logpath)
meas.showMaximized() meas.showMaximized()
ret = app.exec_() ret = app.exec_()
\ No newline at end of file
...@@ -27,54 +27,59 @@ from imagestitch import imageStacking ...@@ -27,54 +27,59 @@ from imagestitch import imageStacking
import os import os
import cv2 import cv2
from helperfunctions import cv2imread_fix, cv2imwrite_fix from helperfunctions import cv2imread_fix, cv2imwrite_fix
from time import sleep, time from time import time
import datetime import datetime
import sys import sys
from pathlib import Path
#def scan(path, sol, zpositions, grid, controlclass, connection, ishdr=False): def scan(path, sol, zpositions, grid, controlclass, dataqueue,
def scan(path, sol, zpositions, grid, controlclass, dataqueue, stopevent, ishdr=False): stopevent, logpath='', ishdr=False):
print('starting new optical scan')
sys.stdout.flush()
if ishdr: if ishdr:
merge_mertens = cv2.createMergeMertens() merge_mertens = cv2.createMergeMertens()
logpath = os.path.join(Path.home(),'gepard', 'scanlog.txt') fp = None
with open(logpath, "a") as fp: if logpath != '':
sys.stderr = fp try:
sys.stdout = fp fp = open(os.path.join(logpath, 'scanlog.txt'), 'a')
sys.stderr = fp
sys.stdout = fp
except IOError:
print('separate loging failed', flush=True)
pass
print('starting new optical scan', flush=True)
ramanctrl = controlclass() ramanctrl = controlclass()
ramanctrl.connect() ramanctrl.connect()
zlist = list(enumerate(zpositions)) zlist = list(enumerate(zpositions))
for i, p in enumerate(grid): for i, p in enumerate(grid):
x, y = p x, y = p
z = sol[0]*x + sol[1]*y + sol[2] z = sol[0]*x + sol[1]*y + sol[2]
for k, zk in (zlist if i%2==0 else zlist[::-1]): for k, zk in (zlist if i%2==0 else zlist[::-1]):
name = f"image_{i}_{k}.bmp" name = f"image_{i}_{k}.bmp"
print("time:", time()) print("time:", time(), flush=True)
zik = z+zk zik = z+zk
assert not np.isnan(zik) assert not np.isnan(zik)
print("moving to:", x, y, zik) print("moving to:", x, y, zik, flush=True)
ramanctrl.moveToAbsolutePosition(x, y, zik) ramanctrl.moveToAbsolutePosition(x, y, zik)
if ishdr: if ishdr:
img_list = [] img_list = []
fname = os.path.join(path,f"tmp.bmp") fname = os.path.join(path,f"tmp.bmp")
values = [5.,25.,100.] values = [5.,25.,100.]
for j, val in enumerate(values if (i%2+k%2)%2==0 else reversed(values)): for j, val in enumerate(values if (i%2+k%2)%2==0 else reversed(values)):
ramanctrl.setBrightness(val) ramanctrl.setBrightness(val)
ramanctrl.saveImage(fname) ramanctrl.saveImage(fname)
img_list.append(cv2imread_fix(fname)) img_list.append(cv2imread_fix(fname))
res_mertens = merge_mertens.process(img_list) res_mertens = merge_mertens.process(img_list)
res_mertens_8bit = np.clip(res_mertens*255, 0, 255).astype('uint8') res_mertens_8bit = np.clip(res_mertens*255, 0, 255).astype('uint8')
cv2imwrite_fix(os.path.join(path,name), res_mertens_8bit) cv2imwrite_fix(os.path.join(path,name), res_mertens_8bit)
else: else:
ramanctrl.saveImage(os.path.join(path,name)) ramanctrl.saveImage(os.path.join(path,name))
if stopevent.is_set(): if stopevent.is_set():
ramanctrl.disconnect() ramanctrl.disconnect()
return return
dataqueue.put(i) dataqueue.put(i)
ramanctrl.disconnect() ramanctrl.disconnect()
if fp is not None:
fp.close()
def loadAndPasteImage(srcnames, fullimage, fullzval, width, height, def loadAndPasteImage(srcnames, fullimage, fullzval, width, height,
...@@ -212,8 +217,9 @@ class OpticalScan(QtWidgets.QWidget): ...@@ -212,8 +217,9 @@ class OpticalScan(QtWidgets.QWidget):
imageUpdate = QtCore.pyqtSignal(str, name='imageUpdate') #str = 'df' (= darkfield) or 'bf' (=bright field) imageUpdate = QtCore.pyqtSignal(str, name='imageUpdate') #str = 'df' (= darkfield) or 'bf' (=bright field)
boundaryUpdate = QtCore.pyqtSignal() boundaryUpdate = QtCore.pyqtSignal()
def __init__(self, ramanctrl, dataset, parent=None): def __init__(self, ramanctrl, dataset, logpath='', parent=None):
super().__init__(parent, QtCore.Qt.Window) super().__init__(parent, QtCore.Qt.Window)
self.logpath = logpath
self.view = parent self.view = parent
vbox = QtWidgets.QVBoxLayout() vbox = QtWidgets.QVBoxLayout()
pointgroup = QtWidgets.QGroupBox("Point coordinates [µm]", self) pointgroup = QtWidgets.QGroupBox("Point coordinates [µm]", self)
...@@ -508,7 +514,7 @@ class OpticalScan(QtWidgets.QWidget): ...@@ -508,7 +514,7 @@ class OpticalScan(QtWidgets.QWidget):
A = np.ones((points.shape[0],3)) A = np.ones((points.shape[0],3))
A[:,:2] = points[:,:2] A[:,:2] = points[:,:2]
b = points[:,2] b = points[:,2]
sol = np.linalg.lstsq(A,b)[0] sol = np.linalg.lstsq(A, b, rcond=None)[0]
self.dataset.heightmap = sol self.dataset.heightmap = sol
print("Fit deviation:", sol[0]*points[:,0]+sol[1]*points[:,1]+sol[2] -points[:,2] ) print("Fit deviation:", sol[0]*points[:,0]+sol[1]*points[:,1]+sol[2] -points[:,2] )
...@@ -545,7 +551,8 @@ class OpticalScan(QtWidgets.QWidget): ...@@ -545,7 +551,8 @@ class OpticalScan(QtWidgets.QWidget):
self.dataqueue = Queue() self.dataqueue = Queue()
self.process = Process(target=scan, args=(path, sol, self.dataset.zpositions, self.process = Process(target=scan, args=(path, sol, self.dataset.zpositions,
self.dataset.grid, self.ramanctrl.__class__, self.dataset.grid, self.ramanctrl.__class__,
self.dataqueue, self.processstopevent, self.hdrcheck.isChecked())) self.dataqueue, self.processstopevent,
self.logpath, self.hdrcheck.isChecked()))
self.process.start() self.process.start()
self.starttime = time() self.starttime = time()
self.progresstime.setEnabled(True) self.progresstime.setEnabled(True)
...@@ -619,8 +626,10 @@ class OpticalScan(QtWidgets.QWidget): ...@@ -619,8 +626,10 @@ class OpticalScan(QtWidgets.QWidget):
if __name__ == "__main__": if __name__ == "__main__":
from ramancom.simulatedraman import SimulatedRaman from ramancom.simulatedraman import SimulatedRaman
from dataset import DataSet
app = QtWidgets.QApplication(sys.argv) app = QtWidgets.QApplication(sys.argv)
optscan = OpticalScan(SimulatedRaman()) ds = DataSet('Test')
optscan = OpticalScan(SimulatedRaman(), ds)
optscan.show() optscan.show()
sys.exit(app.exec_()) sys.exit(app.exec_())
\ No newline at end of file
...@@ -23,12 +23,11 @@ from PyQt5 import QtCore, QtWidgets ...@@ -23,12 +23,11 @@ from PyQt5 import QtCore, QtWidgets
import numpy as np import numpy as np
from multiprocessing import Process, Queue, Event from multiprocessing import Process, Queue, Event
import queue import queue
from time import sleep, time from time import time
from external import tsp from external import tsp
import datetime import datetime
import sys import sys
import os import os
from pathlib import Path
def reorder(points, N=20): def reorder(points, N=20):
y0, y1 = points[:,1].min(), points[:,1].max() y0, y1 = points[:,1].min(), points[:,1].max()
...@@ -48,44 +47,49 @@ def reorder(points, N=20): ...@@ -48,44 +47,49 @@ def reorder(points, N=20):
assert np.unique(newind).shape[0]==allind.shape[0] assert np.unique(newind).shape[0]==allind.shape[0]
return newind return newind
def scan(ramanSettings, positions, controlclass, dataqueue, stopevent): def scan(ramanSettings, positions, controlclass, dataqueue, stopevent,
logpath = os.path.join(Path.home(),'gepard', 'ramanscanlog.txt') logpath=''):
with open(logpath, "a") as fp: fp = None
sys.stderr = fp if logpath != '':
sys.stdout = fp try:
fp = open(os.path.join(logpath, 'ramanscanlog.txt'), 'a')
sys.stderr = fp
sys.stdout = fp
except IOError:
print('separate loging failed', flush=True)
pass
ramanctrl = controlclass() ramanctrl = controlclass()
ramanctrl.connect() ramanctrl.connect()
print("connected:", time()) print("connected:", time(), flush=True)
ramanctrl.initiateMeasurement(ramanSettings) ramanctrl.initiateMeasurement(ramanSettings)
for i, p in enumerate(positions): for i, p in enumerate(positions):
x, y, z = p x, y, z = p
print("time:", time()) print("time:", time(), flush=True)
print("position:", x, y, z) print("position:", x, y, z, flush=True)
sys.stdout.flush() #remove this line after testing ramanctrl.moveToAbsolutePosition(x, y, z, measurementRunning=True)
ramanctrl.moveToAbsolutePosition(x, y, z, measurementRunning=True) print("move done", flush=True)
print("move done") ramanctrl.triggerMeasurement(i)
sys.stdout.flush() print("trigger done", flush=True)
ramanctrl.triggerMeasurement(i)
print("trigger done") if stopevent.is_set():
sys.stdout.flush() ramanctrl.disconnect()
return
if stopevent.is_set():
ramanctrl.disconnect() dataqueue.put(i)
return
dataqueue.put(i)
ramanctrl.disconnect()
ramanctrl.disconnect()
if fp is not None:
fp.close()
class RamanScanUI(QtWidgets.QWidget): class RamanScanUI(QtWidgets.QWidget):
imageUpdate = QtCore.pyqtSignal(str, name='imageUpdate') #str = 'df' (= darkfield) or 'bf' (=bright field) imageUpdate = QtCore.pyqtSignal(str, name='imageUpdate') #str = 'df' (= darkfield) or 'bf' (=bright field)
ramanscanUpdate = QtCore.pyqtSignal() ramanscanUpdate = QtCore.pyqtSignal()
def __init__(self, ramanctrl, dataset, parent=None): def __init__(self, ramanctrl, dataset, logpath='', parent=None):
super().__init__(parent, QtCore.Qt.Window) super().__init__(parent, QtCore.Qt.Window)
self.view = parent self.view = parent
self.logpath = logpath
self.ramanctrl = ramanctrl self.ramanctrl = ramanctrl
self.dataset = dataset self.dataset = dataset
self.process = None self.process = None
...@@ -242,7 +246,11 @@ class RamanScanUI(QtWidgets.QWidget): ...@@ -242,7 +246,11 @@ class RamanScanUI(QtWidgets.QWidget):
self.ramanctrl.disconnect() self.ramanctrl.disconnect()
self.processstopevent = Event() self.processstopevent = Event()
self.dataqueue = Queue() self.dataqueue = Queue()
self.process = Process(target=scan, args=(ramanSettings, scanpoints, self.ramanctrl.__class__, self.dataqueue, self.processstopevent)) self.process = Process(target=scan, args=(ramanSettings, scanpoints,
self.ramanctrl.__class__,
self.dataqueue,
self.processstopevent,
self.logpath))
self.process.start() self.process.start()
self.starttime = time() self.starttime = time()
self.timer = QtCore.QTimer(self) self.timer = QtCore.QTimer(self)
......
...@@ -35,9 +35,10 @@ from ramancom.configRaman import RamanConfigWin ...@@ -35,9 +35,10 @@ from ramancom.configRaman import RamanConfigWin
class SampleView(QtWidgets.QGraphicsView): class SampleView(QtWidgets.QGraphicsView):
ScalingChanged = QtCore.pyqtSignal(float) ScalingChanged = QtCore.pyqtSignal(float)
def __init__(self): def __init__(self, logpath):
super(SampleView, self).__init__() super(SampleView, self).__init__()
self.logpath = logpath
self.item = QtWidgets.QGraphicsPixmapItem() self.item = QtWidgets.QGraphicsPixmapItem()
self.item.setPos(0, 0) self.item.setPos(0, 0)
self.item.setAcceptedMouseButtons(QtCore.Qt.NoButton) self.item.setAcceptedMouseButtons(QtCore.Qt.NoButton)
...@@ -77,9 +78,9 @@ class SampleView(QtWidgets.QGraphicsView): ...@@ -77,9 +78,9 @@ class SampleView(QtWidgets.QGraphicsView):
self.contouritem = SegmentationContours(self) self.contouritem = SegmentationContours(self)
scene.addItem(self.contouritem) scene.addItem(self.contouritem)
self.detectionwidget = None self.detectionwidget = None
self.ramanwidget = RamanScanUI(self.ramanctrl, None, self) self.ramanwidget = RamanScanUI(self.ramanctrl, None, self.logpath, self)
self.ramanwidget.imageUpdate.connect(self.loadPixmap) self.ramanwidget.imageUpdate.connect(self.loadPixmap)
self.oscanwidget = OpticalScan(self.ramanctrl, None, self) self.oscanwidget = OpticalScan(self.ramanctrl, None, self.logpath, self)
self.oscanwidget.imageUpdate.connect(self.loadPixmap) self.oscanwidget.imageUpdate.connect(self.loadPixmap)
self.oscanwidget.boundaryUpdate.connect(self.resetBoundary) self.oscanwidget.boundaryUpdate.connect(self.resetBoundary)
self.analysiswidget = None self.analysiswidget = None
...@@ -194,7 +195,7 @@ class SampleView(QtWidgets.QGraphicsView): ...@@ -194,7 +195,7 @@ class SampleView(QtWidgets.QGraphicsView):
self.ramanwidget.setVisible(False) self.ramanwidget.setVisible(False)
if self.analysiswidget is None: if self.analysiswidget is None:
print('creating new analysiswidget') print('creating new analysiswidget')
self.analysiswidget = ParticleAnalysis(self) self.analysiswidget = ParticleAnalysis(self.dataset, self)
self.analysiswidget.showMaximized() self.analysiswidget.showMaximized()
else: else:
print('show maximized already exisiting analysiswidget') print('show maximized already exisiting analysiswidget')
...@@ -477,7 +478,7 @@ class SampleView(QtWidgets.QGraphicsView): ...@@ -477,7 +478,7 @@ class SampleView(QtWidgets.QGraphicsView):
self.boundaryitems = edges, nodes self.boundaryitems = edges, nodes
self.resetScanPositions() self.resetScanPositions()
@QtCore.pyqtSlot(int, float) @QtCore.pyqtSlot(int, bool)
def selectContour(self, index, centerOn=True): def selectContour(self, index, centerOn=True):
if self.analysiswidget is not None: if self.analysiswidget is not None:
if self.analysiswidget.uniquePolymers is not None: if self.analysiswidget.uniquePolymers is not None:
......
...@@ -197,7 +197,10 @@ class Segmentation(object): ...@@ -197,7 +197,10 @@ class Segmentation(object):
def erodeConvexDefects(self, thresh, numiter): def erodeConvexDefects(self, thresh, numiter):
thresh = cv2.copyMakeBorder(thresh, 1, 1, 1, 1, 0) thresh = cv2.copyMakeBorder(thresh, 1, 1, 1, 1, 0)
for iterations in range(numiter): for iterations in range(numiter):
thresh2, contours, hierarchy = cv2.findContours(thresh.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) if cv2.__version__ > '3.5':
contours, hierarchy = cv2.findContours(thresh.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
else:
thresh2, contours, hierarchy = cv2.findContours(thresh.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
for cnt in contours: for cnt in contours:
hull = cv2.convexHull(cnt, returnPoints = False) hull = cv2.convexHull(cnt, returnPoints = False)
defects = cv2.convexityDefects(cnt, hull) defects = cv2.convexityDefects(cnt, hull)
...@@ -387,7 +390,11 @@ class Segmentation(object): ...@@ -387,7 +390,11 @@ class Segmentation(object):
if return_step=="watershed": if return_step=="watershed":
return np.uint8(255*(markers!=0)), 0 return np.uint8(255*(markers!=0)), 0
temp, contours, hierarchy = cv2.findContours(markers, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
if cv2.__version__ > '3.5':
contours, hierarchy = cv2.findContours(markers, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
else:
temp, contours, hierarchy = cv2.findContours(markers, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
print("contours") print("contours")
if self.cancelcomputation: if self.cancelcomputation:
return None, None, None return None, None, None
......