Commit f458c65b authored by Lars Bittrich

segmentation.py: adaptation to allow usage of OpenCV 4.0 alongside 3.4

editParticles.py: correctly handle cancellation of the Combine and Reassign actions; adaptation to allow usage of OpenCV 4.0 alongside 3.4
analysisview.py: initial restructuring; export dialogs are now QDialogs instead of QWidgets; ExcelExport restructured and bugs therein fixed; the hit quality index export is broken and is now set to zero instead of exporting wrong numbers
parent 8c55b20a
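For reference: cv2.findContours returns (image, contours, hierarchy) in OpenCV 3.x but only (contours, hierarchy) in 4.x. The hunks below handle this by branching on cv2.__version__ at each call site; a minimal alternative sketch (not the code of this commit) would unpack the last two return values, which works for either version:

import cv2

def findContoursCompat(img, mode, method):
    # hypothetical helper: the last two return values of cv2.findContours are
    # (contours, hierarchy) in both OpenCV 3.x and 4.x
    result = cv2.findContours(img, mode, method)
    return result[-2], result[-1]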
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018 Lars Bittrich and Josef Brandt, Leibniz-Institut für
@@ -20,8 +20,9 @@ along with this program, see COPYING.
If not, see <https://www.gnu.org/licenses/>.
"""
import numpy as np
from PyQt5 import QtCore, QtGui, QtWidgets
import pandas as pd
import sys
import operator
import os
@@ -32,10 +33,9 @@ from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
import pandas as pd
from analysis.loadresults import LoadWITecResults
from analysis.editParticles import ParticleEditor
from analysis.database import DataBaseWindow
from .loadresults import LoadWITecResults
from .editParticles import ParticleEditor
from .database import DataBaseWindow
try:
from analysis.sqlexport import SQLExport
sqlEnabled = True
@@ -58,7 +58,6 @@ class ParticleAnalysis(QtWidgets.QMainWindow):
self.config = self.parent.dataset.resultParams
self.editor = ParticleEditor(self)
self.expWin = None
self.spectraResults = None #entire List of all spectra assignments
self.additiveResults = None #entire List of all additives
@@ -70,7 +69,6 @@ class ParticleAnalysis(QtWidgets.QMainWindow):
self.uniquePolymers = None #list of present polymer types
self.spectra = None #acquired spectra
self.indices = None #assignment of what spectra-indices belong to what substance
self.expWindow = None
self.additivePlot = None
self.importWindow = None
self.directory = None
@@ -573,18 +571,13 @@ class ParticleAnalysis(QtWidgets.QMainWindow):
self.createPolymerOverlay()
def exportToExcel(self):
if self.expWin is not None:
self.expWin.close()
self.expWin.destroy()
self.expWin = ExpExcelDialog(self)
self.expWin.show()
expWin = ExpExcelDialog(self)
expWin.exec()
def exportToSQL(self):
if self.expWin is not None:
self.expWin.close()
self.expWin.destroy()
self.expWin = SQLExport(self)
self.expWin.show()
sqlexp = SQLExport(self)
sqlexp.exec()
def updateSpecPlot(self, centerOn=True, highlightContour=True):
#draw Sample Spectrum
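For context, the export windows above are now modal QDialogs: exec() blocks until the dialog finishes via accept() or reject(), so the old expWin bookkeeping (close()/destroy() before reopening) is no longer needed. A minimal sketch of the pattern, with a hypothetical ExportDialog standing in for ExpExcelDialog/SQLExport:

from PyQt5 import QtWidgets

class ExportDialog(QtWidgets.QDialog):
    # hypothetical stand-in for ExpExcelDialog / SQLExport
    def __init__(self, parent=None):
        super().__init__(parent)
        btn = QtWidgets.QPushButton('Export', self)
        btn.clicked.connect(self.accept)  # accept() ends exec() and closes the dialog

def openExportDialog():
    dlg = ExportDialog()
    dlg.exec()  # modal: no persistent reference to the dialog is required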
@@ -859,14 +852,14 @@ class ParticleAnalysis(QtWidgets.QMainWindow):
def closeEvent(self, event):
for window in [self.expWindow, self.additivePlot, self.importWindow, self.dbWin]:
for window in [self.additivePlot, self.importWindow, self.dbWin]:
try: window.close()
except: pass
self.parent.imparent.particelAnalysisAct.setChecked(False)
event.accept()
class ExpExcelDialog(QtWidgets.QWidget):
class ExpExcelDialog(QtWidgets.QDialog):
def __init__(self, parent):
super(ExpExcelDialog, self).__init__()
self.setWindowTitle('Export Options')
@@ -919,22 +912,6 @@ class ExpExcelDialog(QtWidgets.QWidget):
self.layout.addWidget(excelgroup)
# sqlGroup = QtWidgets.QGroupBox('Export to SQL')
# sqlLayout = QtWidgets.QVBoxLayout()
#
# self.sqlbtn = QtWidgets.QPushButton('Export to SQL Database')
# self.sqlbtn.resize(self.sqlbtn.sizeHint())
# if sqlEnabled:
# self.sqlbtn.clicked.connect(self.toSQL)
# else:
# self.sqlbtn.setDisabled(True)
#
# self.sqlExport = None
#
# sqlLayout.addWidget(self.sqlbtn)
# sqlGroup.setLayout(sqlLayout)
#
# self.layout.addWidget(sqlGroup)
self.show()
def toExcel(self):
@@ -958,35 +935,34 @@ class ExpExcelDialog(QtWidgets.QWidget):
requiredcolumns.append('50 - 100 µm')
requiredcolumns.append('> 100 µm')
self.finalData = np.array(['nothing to see here, iam a boring placeholder']*((self.polymers.shape[0]+1)*len(requiredcolumns))).reshape(((self.polymers.shape[0]+1), len(requiredcolumns)))
#create header:
self.finalData[0, :] = np.array(requiredcolumns)
self.finalData[0, 0] = 'Polymer Type'
rowindex = 1
finalData = np.zeros((self.polymers.shape[0],len(requiredcolumns)-1))
polymertypes = [""]*self.polymers.shape[0]
rowindex = 0
for polymer in np.unique(self.polymers):
indices = np.where(self.polymers == polymer)[0]
numentries = int(len(indices))
indices = self.polymers == polymer
numentries = int(np.sum(indices))
print("Num:", numentries)
sys.stdout.flush()
for colindex, column in enumerate(requiredcolumns):
if column == 'Polymer Type (mandatory)':
self.finalData[rowindex:rowindex+numentries, colindex] = self.polymers[indices]
polymertypes[rowindex:rowindex+numentries] = self.polymers[indices]
if column == 'Additives':
self.finalData[rowindex:rowindex+numentries, colindex] = self.additives[indices]
finalData[rowindex:rowindex+numentries, colindex-1] = self.additives[indices]
if column == 'Long Size (µm)':
self.finalData[rowindex:rowindex+numentries, colindex] = longSize[indices]
finalData[rowindex:rowindex+numentries, colindex-1] = longSize[indices]
if column == 'Short Size (µm)':
self.finalData[rowindex:rowindex+numentries, colindex] = shortSize[indices]
finalData[rowindex:rowindex+numentries, colindex-1] = shortSize[indices]
if column == 'Area (µm²)':
self.finalData[rowindex:rowindex+numentries, colindex] = area[indices]
if column == 'HQI':
self.finalData[rowindex:rowindex+numentries, colindex] = self.hqis[indices]
finalData[rowindex:rowindex+numentries, colindex-1] = area[indices]
# hit quality index array does not match the data size if particles have been combined
#if column == 'HQI':
# finalData[rowindex:rowindex+numentries, colindex-1] = self.hqis[indices]
if '> 100 µm' in requiredcolumns:
##append size classes
numPrevCols = len(requiredcolumns) - 6 #number of previous columns
for tableindex, dataindex in enumerate(indices):
numPrevCols = len(requiredcolumns) - 1 - len(self.sizeClasses) #number of previous columns
for tableindex, dataindex in enumerate(np.arange(len(indices))[indices]):
for classindex in range(len(self.sizeClasses)):
upLimit = self.sizeClasses[classindex]
if classindex == 0: lowLimit = 0
@@ -994,9 +970,9 @@ class ExpExcelDialog(QtWidgets.QWidget):
curSize = self.sizes[dataindex]
if curSize > lowLimit and curSize <= upLimit:
self.finalData[rowindex+tableindex, numPrevCols + classindex] = np.int(1)
finalData[rowindex+tableindex, numPrevCols + classindex] = np.int(1)
else:
self.finalData[rowindex+tableindex, numPrevCols + classindex] = np.int(0)
finalData[rowindex+tableindex, numPrevCols + classindex] = np.int(0)
rowindex = rowindex + numentries
@@ -1014,32 +990,29 @@ class ExpExcelDialog(QtWidgets.QWidget):
writer = pd.ExcelWriter(xlsname, engine = 'xlsxwriter')
df = pd.DataFrame(self.finalData)
df.to_excel(writer, sheet_name = 'Individual Particles', header = False, index = False)
df = pd.DataFrame(finalData, columns=requiredcolumns[1:])
df.insert(0, 'Polymer Type', polymertypes)
df.to_excel(writer, sheet_name = 'Individual Particles', index = False)
if '> 100 µm' in requiredcolumns:
#generate particle statistics report
report = [['Polymer Type', '0 - 5 µm', '5 - 10 µm', '10 - 20 µm', '20 - 50 µm', '50 - 100 µm', '> 100 µm']]
header = ['0 - 5 µm', '5 - 10 µm', '10 - 20 µm', '20 - 50 µm', '50 - 100 µm', '> 100 µm']
index = np.unique(self.polymers)
particleclasses = []
for polymer in np.unique(self.polymers):
for polymer in index:
indices = np.where(self.polymers == polymer)[0]
classes = np.array([0, 0, 0, 0, 0, 0])
for size in self.sizes[indices]:
if size < 5: classes[0] += 1
elif size < 10: classes[1] += 1
elif size < 20: classes[2] += 1
elif size < 50: classes[3] += 1
elif size < 100: classes[4] += 1
else: classes[5] += 1
report.append([polymer, classes[0], classes[1], classes[2], classes[3], classes[4], classes[5]])
sortind = np.searchsorted([5,10,20,50,100], self.sizes[indices], 'right')
classes = np.bincount(sortind, minlength=6)
particleclasses.append(classes)
report = pd.DataFrame(np.array(report))
report.to_excel(writer, sheet_name = 'Particle Statistics', header = False, index = False)
QtWidgets.QMessageBox.about(self, 'Done!', 'Particle Data exported')
def closeEvent(self, event):
if self.expWin is not None:
self.expWin.close()
event.accept()
particleclasses = np.array(particleclasses)
report = pd.DataFrame(np.array(particleclasses), columns=header,
dtype=int)
report.insert(0, 'Polymer Type', index)
report.insert(len(report.columns), 'Sum total', particleclasses.sum(axis=1))
report.to_excel(writer, sheet_name = 'Particle Statistics', index=False)
writer.save()
self.accept()
class AdditiveViewer(QtWidgets.QWidget):
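The statistics sheet above replaces the per-particle if/elif counting with vectorized binning via np.searchsorted and np.bincount; a small sketch with made-up sizes shows how the class counts come out:

import numpy as np

sizes = np.array([3.2, 7.5, 12.0, 48.0, 150.0])     # made-up particle sizes in µm
limits = [5, 10, 20, 50, 100]                       # upper bounds of the first five size classes
classIdx = np.searchsorted(limits, sizes, 'right')  # size class index 0..5 for each particle
counts = np.bincount(classIdx, minlength=6)         # -> array([1, 1, 1, 1, 0, 1])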
@@ -1066,6 +1039,6 @@ if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
meas = ParticleAnalysis(None)
meas.showMaximized()
ret = app.exec_()
return app.exec_()
run()
\ No newline at end of file
@@ -52,6 +52,8 @@ class ParticleEditor(object):
def combineParticles(self, contourIndices, new_assignment):
if new_assignment == 'other':
new_assignment = self.getNewEntry()
if new_assignment is None:
return
contourIndices = sorted(contourIndices) #we want to keep the contour with lowest index
print('merging contours:', contourIndices)
@@ -80,7 +82,10 @@ class ParticleEditor(object):
img = np.uint8(cv2.morphologyEx(img, cv2.MORPH_CLOSE, np.ones((3, 3))))
temp, contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
if cv2.__version__ > '3.5':
contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
else:
temp, contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
newContour = contours[0]
stats = self.characterizeParticle(newContour)
@@ -139,6 +144,8 @@ class ParticleEditor(object):
def reassignParticles(self, contourindices, new_assignment):
if new_assignment == 'other':
new_assignment = self.getNewEntry()
if new_assignment is None:
return
self.createSafetyBackup()
print(f'reassigning indices {contourindices} into {new_assignment}')
......
@@ -197,7 +197,10 @@ class Segmentation(object):
def erodeConvexDefects(self, thresh, numiter):
thresh = cv2.copyMakeBorder(thresh, 1, 1, 1, 1, 0)
for iterations in range(numiter):
thresh2, contours, hierarchy = cv2.findContours(thresh.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
if cv2.__version__ > '3.5':
contours, hierarchy = cv2.findContours(thresh.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
else:
thresh2, contours, hierarchy = cv2.findContours(thresh.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
for cnt in contours:
hull = cv2.convexHull(cnt, returnPoints = False)
defects = cv2.convexityDefects(cnt, hull)
@@ -387,7 +390,11 @@ class Segmentation(object):
if return_step=="watershed":
return np.uint8(255*(markers!=0)), 0
temp, contours, hierarchy = cv2.findContours(markers, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
if cv2.__version__ > '3.5':
contours, hierarchy = cv2.findContours(markers, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
else:
temp, contours, hierarchy = cv2.findContours(markers, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
print("contours")
if self.cancelcomputation:
return None, None, None
......