Commit 405a0850 authored by Josef Brandt

Basic FTIR functions

parent c5a3864a
@@ -24,3 +24,5 @@ ramancom/renishawtesting.py
*.lib
*.obj
*.pyc
"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018 Lars Bittrich and Josef Brandt, Leibniz-Institut für
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program, see COPYING.
If not, see <https://www.gnu.org/licenses/>.
"""
import os
import numpy as np
class AdvancedWITecSpectra(object):
    """
    Handles spectra formatting and storage when using the advanced "silent spectrum"
    option of the WITec COM interface.
    """
    def __init__(self):
        super(AdvancedWITecSpectra, self).__init__()
        self.dsetpath = None
        self.tmpspecpath = None
        self.curSpecIndex = None
        self.excitWavel = None
        self.spectraBatchSize = None

    def setDatasetPath(self, path):
        self.dsetpath = path

    def setSpectraBatchSize(self, batchSize):
        self.spectraBatchSize = batchSize

    def createTmpSpecFolder(self):
        assert self.dsetpath is not None
        self.tmpspecpath = os.path.join(self.dsetpath, 'spectra')
        if not os.path.exists(self.tmpspecpath):
            os.mkdir(self.tmpspecpath)

    def registerNewSpectrum(self, specString, specIndex):
        wavenumbers, averaged_counts = self.deassembleSpecString(specString)
        if specIndex == 0:
            fname = os.path.join(self.tmpspecpath, 'Wavenumbers.npy')
            np.save(fname, wavenumbers)
        fname = os.path.join(self.tmpspecpath, f'Spectrum ({specIndex}).npy')
        np.save(fname, averaged_counts)
        self.curSpecIndex = specIndex
    def createSummarizedSpecFiles(self):
        allSpectra = self.getAllSpectra()
        allspecfname = os.path.join(self.dsetpath, 'spectra.npy')
        np.save(allspecfname, allSpectra)
        self.createTrueMatchTxt(allSpectra, self.excitWavel)

    def deassembleSpecString(self, specString):
        keywordLines = self.getKeyWordLines(specString)
        try:
            specSize = self.getSpecSize(specString, keywordLines['SpectrumSize'][0])
        except:
            print(keywordLines)
            raise
        wavenumbers = self.getWavenumbers(specString, keywordLines['[XData]'][0], specSize)
        xDataKind = self.getXDataKind(specString, keywordLines['XDataKind'][0])
        self.excitWavel = self.getExcitationWavelength(specString, keywordLines['ExcitationWavelength'][0])
        if xDataKind == 'nm':
            wavenumbers = self.convertWavenumbersFrom_nm_to_Percm(wavenumbers, self.excitWavel)
        else:
            print('warning, unexpected xDataKind:', xDataKind)
            print('please check how to deal with it!!!')
            assert False
        averaged_counts = self.getAveragedSpectra(specString, keywordLines['SpectrumData'], specSize)
        return wavenumbers, averaged_counts
    def getKeyWordLines(self, specString):
        keywordLines = {'[WITEC_TRUEMATCH_ASCII_HEADER]': [],
                        '[XData]': [],
                        'ExcitationWavelength': [],
                        'SpectrumSize': [],
                        'XDataKind': [],
                        'SpectrumHeader': [],
                        'SampleMetaData': [],
                        'SpectrumData': []}
        for index, line in enumerate(specString):
            for key in keywordLines.keys():
                if line.find(key) != -1:
                    keywordLines[key].append(index)
        return keywordLines

    def getSpecSize(self, specString, specSizeIndex):
        line = specString[specSizeIndex]
        specSize = [int(s) for s in line.split() if self.isNumber(s)]
        assert len(specSize) == 1
        return specSize[0]

    def getExcitationWavelength(self, specString, excitWavenumIndex):
        line = specString[excitWavenumIndex]
        excitWavel = [float(s) for s in line.split() if self.isNumber(s)]
        assert len(excitWavel) == 1
        return excitWavel[0]

    def getXDataKind(self, specString, xDataKindIndex):
        line = specString[xDataKindIndex]
        return line.split()[-1]

    def getWavenumbers(self, specString, startXDataIndex, specSize):
        wavenumbers = []
        curIndex = startXDataIndex + 1
        curLine = specString[curIndex]
        while self.isNumber(curLine):
            wavenumbers.append(float(curLine))
            curIndex += 1
            curLine = specString[curIndex]
        assert len(wavenumbers) == specSize
        return wavenumbers

    def convertWavenumbersFrom_nm_to_Percm(self, wavenumbers, excit_nm):
        newWavenumbers = []
        for abs_nm in wavenumbers:
            raman_shift = 1E7/excit_nm - 1E7/abs_nm
            newWavenumbers.append(raman_shift)
        return newWavenumbers

    def getAveragedSpectra(self, specString, startIndices, specSize):
        startIndices = [i+1 for i in startIndices]  # the spectrum starts one line AFTER the SpectrumData tag
        spectrum = []
        for index in range(specSize):
            curSlice = [float(specString[index + startIndex]) for startIndex in startIndices]
            spectrum.append(np.mean(curSlice))
        return spectrum
    def getAllSpectra(self):
        numSpectra = self.curSpecIndex + 1
        wavenumbers = np.load(os.path.join(self.tmpspecpath, 'Wavenumbers.npy'))
        allSpectra = np.zeros((wavenumbers.shape[0], numSpectra+1))
        allSpectra[:, 0] = wavenumbers
        for i in range(numSpectra):
            curSpecPath = os.path.join(self.tmpspecpath, f'Spectrum ({i}).npy')
            allSpectra[:, i+1] = np.load(curSpecPath)
            os.remove(curSpecPath)
        return allSpectra

    def createTrueMatchTxt(self, allSpectra, wavelength):
        def writeHeader(fp):
            fp.write('[WITEC_TRUEMATCH_ASCII_HEADER]\n\r')
            fp.write('Version = 2.0\n\r\n\r')

        def writeWavenumbers(fp, wavenumbers):
            fp.write('[XData]\n\r')
            for line in wavenumbers:
                fp.write(str(line) + '\n\r')

        def writeSpectrum(fp, intensities):
            fp.write('\n\r')
            fp.write('[SpectrumHeader]\n\r')
            fp.write(f'Title = Spectrum {specIndex} \n\r')
            fp.write(f'ExcitationWavelength = {wavelength}\n\r')
            fp.write(f'SpectrumSize = {specSize}\n\r')
            fp.write('XDataKind = 1/cm\n\r\n\r')
            fp.write('[SampleMetaData]\n\r')
            fp.write(f'int Spectrum_Number = {specIndex}\n\r\n\r')
            fp.write('[SpectrumData]\n\r')
            for line in intensities:
                fp.write(str(line) + '\n\r')

        wavenumbers = allSpectra[:, 0]
        spectra = allSpectra[:, 1:]
        specSize = allSpectra.shape[0]
        del allSpectra
        numSpectra = spectra.shape[1]
        numBatches = int(np.ceil(numSpectra/self.spectraBatchSize))
        for batchIndex in range(numBatches):
            outName = os.path.join(self.dsetpath, f'SpectraForTrueMatch {batchIndex}.txt')
            if os.path.exists(outName):
                os.remove(outName)
            if batchIndex < numBatches-1:
                specIndicesInBatch = np.arange(batchIndex*self.spectraBatchSize, (batchIndex+1)*self.spectraBatchSize)
            else:
                specIndicesInBatch = np.arange(batchIndex*self.spectraBatchSize, numSpectra)
            with open(outName, 'w') as fp:
                writeHeader(fp)
                writeWavenumbers(fp, wavenumbers)
                for specIndex in specIndicesInBatch:
                    spec = spectra[:, specIndex]
                    writeSpectrum(fp, spec)

    def isNumber(self, string):
        isNumber = False
        try:
            float(string)
            isNumber = True
        except ValueError:
            pass
        return isNumber
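
# A minimal usage sketch (illustrative only, not part of the commit): the spectrum
# string below is a hypothetical stand-in for the line list that the WITec COM
# interface delivers for one "silent spectrum"; the dataset path and sizes are made up.
if __name__ == '__main__':
    handler = AdvancedWITecSpectra()
    handler.setDatasetPath('.')                 # hypothetical dataset folder (files are written here)
    handler.setSpectraBatchSize(100)
    handler.createTmpSpecFolder()
    exampleSpecString = ['[WITEC_TRUEMATCH_ASCII_HEADER]',
                         'Version = 2.0',
                         'SpectrumSize = 3',
                         'XDataKind = nm',
                         'ExcitationWavelength = 532.0',
                         '[XData]',
                         '550.0', '600.0', '650.0',
                         '[SpectrumData]',
                         '10.0', '20.0', '30.0']
    handler.registerNewSpectrum(exampleSpecString, specIndex=0)
    handler.createSummarizedSpecFiles()         # writes spectra.npy and SpectraForTrueMatch 0.txt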
# -*- coding: utf-8 -*-
"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018 Lars Bittrich and Josef Brandt, Leibniz-Institut für
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program, see COPYING.
If not, see <https://www.gnu.org/licenses/>.
"""
import numpy as np
from scipy import optimize
import cv2
from ..cythonModules.getMaxRect import findMaxRect
####################################################
'''Code taken from https://github.com/pogam/ExtractRect/blob/master/extractRect.py and modified for our use'''
####################################################
def residual(angle, img):
    img_rot, rotationMatrix = rotateImageAroundAngle(img, angle)
    point1, point2, width, height, max_area = findMaxRect(img_rot)
    return 1/max_area


def getFinalRectangle(angle, img, shiftScaleParam):
    ny = img.shape[1]
    img_rot, rotationMatrix = rotateImageAroundAngle(img, angle)
    point1, point2, width, height, max_area = findMaxRect(img_rot)
    width *= shiftScaleParam.scale_factor
    height *= shiftScaleParam.scale_factor

    # invert rectangle
    M_invert = cv2.invertAffineTransform(rotationMatrix)
    rect_coord = [point1, [point1[0], point2[1]],
                  point2, [point2[0], point1[1]]]
    rect_coord_ori = []
    for coord in rect_coord:
        rect_coord_ori.append(np.dot(M_invert, [coord[0], (ny-1)-coord[1], 1]))

    # transform to numpy coord of input image
    coord_out = []
    for coord in rect_coord_ori:
        coord_out.append([shiftScaleParam.scale_factor*round(coord[0], 0) - shiftScaleParam.shift_x,
                          shiftScaleParam.scale_factor*round((ny-1)-coord[1], 0) - shiftScaleParam.shift_y])

    # transpose back the original coords:
    coord_out = transposeRectCoords(coord_out)
    return coord_out, round(width), round(height)


def transposeRectCoords(rectCoords):
    # mirror x and y:
    newCoords = []
    for i in range(len(rectCoords)):
        newCoords.append([rectCoords[i][1], rectCoords[i][0]])
    return newCoords


def addBorderToMakeImageSquare(img):
    nx, ny = img.shape
    if nx != ny:
        n = max([nx, ny])
        img_square = np.ones([n, n])
        xshift = (n-nx)/2
        yshift = (n-ny)/2
        if yshift == 0:
            img_square[int(xshift):int(xshift+nx), :] = img
        else:
            img_square[:, int(yshift):int(yshift+ny)] = img
    else:
        xshift = 0
        yshift = 0
        img_square = img
    return img_square, xshift, yshift
def limitImageSize(img, limitSize):
    if img.shape[0] > limitSize:
        img_small = cv2.resize(img, (limitSize, limitSize), interpolation=0)
        scale_factor = 1.*img.shape[0]/img_small.shape[0]
    else:
        img_small = img
        scale_factor = 1
    return img_small, scale_factor


def makeImageOddSized(img):
    # pad the input data to an odd number of points in each dimension to make rotation easier
    nx, ny = img.shape
    nx_extra = -nx
    ny_extra = -ny
    if nx % 2 == 0:
        nx += 1
        nx_extra = 1
    if ny % 2 == 0:
        ny += 1
        ny_extra = 1
    img_odd = np.ones([img.shape[0] + max([0, nx_extra]), img.shape[1] + max([0, ny_extra])], dtype=np.uint8)
    img_odd[:-nx_extra, :-ny_extra] = img
    return img_odd


def rotateImageAroundAngle(img, angle):
    nx, ny = img.shape
    rotationMatrix = cv2.getRotationMatrix2D(((nx-1)/2, (ny-1)/2), angle, 1)
    img_rot = np.array(cv2.warpAffine(img, rotationMatrix, img.shape, flags=cv2.INTER_NEAREST, borderValue=1))
    img_rot = img_rot.astype(np.uint8)
    return img_rot, rotationMatrix


def findRotatedMaximalRectangle(img, nbre_angle=4, limit_image_size=300):
    img_square, shift_x, shift_y = addBorderToMakeImageSquare(img)
    img_small, scale_factor = limitImageSize(img_square, limit_image_size)
    img_odd = makeImageOddSized(img_small)
    nx, ny = nx_odd, ny_odd = img_odd.shape
    shiftScaleParam = ShiftAndScaleParam(shift_x, shift_y, scale_factor)

    # angle_range = ([(90., 180.), ])
    angle_range = ([(0., 90.), ])
    coeff1 = optimize.brute(residual, angle_range, args=(img_odd,), Ns=nbre_angle, finish=None)
    popt = optimize.fmin(residual, coeff1, args=(img_odd,), xtol=5, ftol=1.e-5, disp=False)
    opt_angle = popt[0]

    rectangleCoords, width, height = getFinalRectangle(opt_angle, img_odd, shiftScaleParam)
    return rectangleCoords, opt_angle, width, height


class ShiftAndScaleParam(object):
    def __init__(self, shift_x=0, shift_y=0, scale_factor=1):
        self.shift_x = shift_x
        self.shift_y = shift_y
        self.scale_factor = scale_factor
\ No newline at end of file
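
# Illustrative sketch (not part of the commit): the call that client code is expected to make.
# It assumes the compiled cython getMaxRect module is importable in the package context;
# mask values follow the convention background = 1, particle = 0 (uint8).
def _findRectangleSketch():
    mask = np.ones((61, 61), dtype=np.uint8)
    mask[15:45, 20:40] = 0      # hypothetical rectangular particle region
    coords, angle, width, height = findRotatedMaximalRectangle(mask, nbre_angle=4, limit_image_size=300)
    return coords, angle, width, height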
@@ -29,6 +29,7 @@ class Particle(object):
        self.height = None
        self.area = None
        self.contour = None
        self.aperture = None
        self.measurements = []
        self.color = None
        self.shape = None
@@ -26,17 +26,34 @@ from copy import deepcopy
from .particleClassification.colorClassification import ColorClassifier
from .particleClassification.shapeClassification import ShapeClassifier
from .ftirAperture import findRotatedMaximalRectangle
from ..segmentation import closeHolesOfSubImage
from ..errors import InvalidParticleError
from ..helperfunctions import cv2imread_fix
class FTIRAperture(object):
    """
    Configuration of an FTIR aperture. Center coordinates, width and height are given in pixels.
    """
    centerX: float = None
    centerY: float = None
    centerZ: float = None
    width: float = None
    height: float = None
    angle: float = None
    rectCoords: list = None


class ParticleStats(object):
-    longSize = None
-    shortSize = None
-    height = None
-    area = None
-    shape = None
-    color = None
+    longSize: float = None
+    shortSize: float = None
+    height: float = None
+    area: float = None
+    shape: str = None
+    color: str = None
+    aperture: FTIRAperture = None


def particleIsValid(particle):
    if particle.longSize == 0 or particle.shortSize == 0:
@@ -45,7 +62,8 @@ def particleIsValid(particle):
    if cv2.contourArea(particle.contour) == 0:
        return False
    return True


def updateStatsOfParticlesIfNotManuallyEdited(particleContainer):
    dataset = particleContainer.datasetParent
    fullimage = loadFullimageFromDataset(dataset)
@@ -59,6 +77,23 @@ def updateStatsOfParticlesIfNotManuallyEdited(particleContainer):
            newStats = getParticleStatsWithPixelScale(particle.contour, dataset, fullimage, zimg)
            particle.__dict__.update(newStats.__dict__)


def getFTIRAperture(partImg: np.ndarray) -> FTIRAperture:
    rectPoints, angle, width, height = findRotatedMaximalRectangle(partImg, nbre_angle=4, limit_image_size=300)
    xvalues: list = [i[0] for i in rectPoints]
    yvalues: list = [i[1] for i in rectPoints]
    aperture = FTIRAperture()
    aperture.height = height
    aperture.width = width
    aperture.angle = angle
    aperture.centerX = int(round(min(xvalues) + (max(xvalues) - min(xvalues)) / 2))
    aperture.centerY = int(round(min(yvalues) + (max(yvalues) - min(yvalues)) / 2))
    aperture.rectCoords = rectPoints
    return aperture


def getParticleStatsWithPixelScale(contour, dataset, fullimage=None, zimg=None):
    if fullimage is None:
        fullimage = loadFullimageFromDataset(dataset)
@@ -84,17 +119,31 @@ def getParticleStatsWithPixelScale(contour, dataset, fullimage=None, zimg=None):
    newStats.longSize *= pixelscale
    newStats.shortSize *= pixelscale

-    partImg = getParticleImageFromFullimage(cnt, fullimage)
+    partImg, extrema = getParticleImageFromFullimage(cnt, fullimage)
    newStats.color = getParticleColor(partImg)

    padding: int = 2
    imgforAperture = np.zeros((partImg.shape[0]+2*padding, partImg.shape[1]+2*padding))
    imgforAperture[padding:partImg.shape[0]+padding, padding:partImg.shape[1]+padding] = np.mean(partImg, axis=2)
    imgforAperture[imgforAperture > 0] = 1  # convert to binary image
    imgforAperture = np.uint8(1 - imgforAperture)  # background has to be 1, particle has to be 0
    aperture: FTIRAperture = getFTIRAperture(imgforAperture)
    aperture.centerX = aperture.centerX + extrema[0] - padding
    aperture.centerY = aperture.centerY + extrema[2] - padding
    aperture.centerZ = newStats.height
    newStats.aperture = aperture

    return newStats
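

# Hedged sketch (illustrative, not part of the commit): the coordinate bookkeeping of the
# aperture fit on a synthetic particle sub-image. 'extrema' stands for the (xmin, xmax,
# ymin, ymax) tuple now returned by getParticleImageFromFullimage; all values are made up.
def _apertureCoordinateSketch():
    partImg = np.zeros((20, 30, 3), dtype=np.uint8)
    partImg[5:15, 8:22, :] = 200                  # hypothetical bright particle pixels
    extrema = (100, 130, 240, 260)                # hypothetical position of the sub-image in the full image
    padding = 2
    mask = np.zeros((partImg.shape[0] + 2*padding, partImg.shape[1] + 2*padding))
    mask[padding:-padding, padding:-padding] = np.mean(partImg, axis=2)
    mask[mask > 0] = 1
    mask = np.uint8(1 - mask)                     # background 1, particle 0, as required above
    aperture = getFTIRAperture(mask)
    aperture.centerX += extrema[0] - padding      # shift centre back to full-image x
    aperture.centerY += extrema[2] - padding      # shift centre back to full-image y
    return aperture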

def getFibreDimension(contour):
    longSize = cv2.arcLength(contour, True)/2
    img = contoursToImg([contour])[0]
    dist = cv2.distanceTransform(img, cv2.DIST_L2, 3)
    maxThickness = np.max(dist)*2
    return longSize, maxThickness


def getParticleColor(imgRGB, colorClassifier=None):
    img = cv2.cvtColor(imgRGB, cv2.COLOR_RGB2HSV_FULL)
    meanHSV = cv2.mean(img)
@@ -103,14 +152,16 @@
    color = colorClassifier.classifyColor(meanHSV)
    return color


def getParticleShape(contour, particleHeight, shapeClassifier=None):
    if shapeClassifier is None:
        shapeClassifier = ShapeClassifier()
    shape = shapeClassifier.classifyShape(contour, particleHeight)
    return shape


def getParticleHeight(contour, fullZimg, dataset):
-    zimg = getParticleImageFromFullimage(contour, fullZimg)
+    zimg, _extrema = getParticleImageFromFullimage(contour, fullZimg)
    if zimg.shape[0] == 0 or zimg.shape[1] == 0:
        raise InvalidParticleError
@@ -122,6 +173,7 @@ def getParticleHeight(contour, fullZimg, dataset):
    height = avg_ZValue/255.*(z1-z0) + z0
    return height


def getContourStats(cnt):
    ##characterize particle
    if cnt.shape[0] >= 5:  ##at least 5 points required for ellipse fitting...
@@ -138,10 +190,12 @@ def getContourStats(cnt):
    return long, short, area


def mergeContours(contours):
    img, xmin, ymin, padding = contoursToImg(contours)
    return imgToCnt(img, xmin, ymin, padding)


def getParticleImageFromFullimage(contour, fullimage):
    contourCopy = deepcopy(contour)
    xmin, xmax, ymin, ymax = getContourExtrema(contourCopy)
@@ -159,8 +213,9 @@ def getParticleImageFromFullimage(contour, fullimage):
    img[mask == 0] = 0
    img = np.array(img, dtype=np.uint8)
-    return img
+    return img, (xmin, xmax, ymin, ymax)


def contoursToImg(contours, padding=0):
    contourCopy = deepcopy(contours)
    xmin, xmax, ymin, ymax = getContourExtrema(contourCopy)
@@ -181,6 +236,7 @@ def contoursToImg(contours, padding=0):
    img = np.uint8(cv2.morphologyEx(img, cv2.MORPH_CLOSE, np.ones((3, 3))))
    return img, xmin, ymin, padding


def imgToCnt(img, xmin, ymin, padding=0):
    def getContour(img, contourMode):
        if cv2.__version__ > '3.5':
@@ -214,6 +270,7 @@ def imgToCnt(img, xmin, ymin, padding=0):
    return contour


def getContourExtrema(contours):
    try:
        cnt = np.vstack(tuple(contours))
@@ -228,6 +285,7 @@ def getContourExtrema(contours):
    return xmin, xmax, ymin, ymax


def getParticleCenterPoint(contour):
    img, xmin, ymin, padding = contoursToImg(contour)
    dist = cv2.distanceTransform(img, cv2.DIST_L2, 3)
@@ -238,8 +296,10 @@ def getParticleCenterPoint(contour):
    y += ymin
    return x, y


def loadFullimageFromDataset(dataset):
    return cv2imread_fix(dataset.getImageName())


def loadZValImageFromDataset(dataset):
    return cv2imread_fix(dataset.getZvalImageName(), cv2.IMREAD_GRAYSCALE)
\ No newline at end of file
@@ -185,12 +185,20 @@ class ParticleContainer(object):
        particle = self.getParticleOfIndex(partIndex)
        return particle.getParticleAssignment()

-    def getMeasurementPixelCoords(self):
-        coords = []
+    def getMeasurementPixelCoords(self) -> list:
+        coords: list = []
        for meas in self.measurements:
            coords.append([meas.pixelcoord_x, meas.pixelcoord_y])
        return coords

    def getApertureCenterCoords(self) -> list:
        coords: list = []
        for particle in self.particles:
            assert particle.aperture is not None, 'Aperture for particle was not set!'
            coords.append([particle.aperture.centerX, particle.aperture.centerY])
        return coords

    def getNumberOfParticlesOfAssignment(self, assignment):
        num = 0
        for particle in self.particles:
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
GEPARD - Gepard-Enabled PARticle Detection
Copyright (C) 2018 Lars Bittrich and Josef Brandt, Leibniz-Institut für
Polymerforschung Dresden e. V. <bittrich-lars@ipfdd.de>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or