'''Deconvolving 2D images.'''
import numpy as np
import h5py
import argparse
from scipy.signal import convolve
from scipy.ndimage import gaussian_filter
from deconvolve_func import DeconvolveWithBead as DWB

parser = argparse.ArgumentParser()
parser.add_argument('bead', type=str, help="original file")
parser.add_argument('-o', '--output', type=str, required=True, help=".h5 file name for the output images")
parser.add_argument('-pd', '--plot_data', type=str, required=True, help=".npy data storage file name; the iteration count, channel and intensity are appended automatically")
parser.add_argument('-it', '--iterations', type=int, required=True, help="number of iterations")
parser.add_argument('-in', '--intensity', type=float, nargs='+', required=True, help="[x y ..], space-delimited image intensities; the signal is divided by each intensity")
parser.add_argument('-k', '--kernel', type=float, nargs='+', required=True, help="[x y ..], space-delimited Gaussian kernel sigma values")
parser.add_argument('--vox', type=float, nargs='+', default=[1.0, 1.0, 1.0], help="voxel size in micrometers, default [1.0, 1.0, 1.0]")
args = parser.parse_args()
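
# Example invocation (illustrative only; the script file name and all parameter
# values below are placeholders, adjust them to your data):
#   python deconvolve_2d.py bead.h5 -o results.h5 -pd plot_data -it 50 \
#       -in 1 10 100 -k 0.5 1.0 2.0 --vox 1.0 1.0 1.0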
# --------- importing the image that will become the GROUND TRUTH ---------
with h5py.File(args.bead, 'r') as hdf5_file:
    # dset = hdf5_file['t0/channel0']  # allows access to attributes
    # vox = reverse(dset.attrs.get('element_size_um'))
    # ch1 = dset[:]  # reads the whole dataset
    # ch2 = hdf5_file['t0/channel1'][:]
    # ch3 = hdf5_file['t0/channel2'][:]
    # original = hdf5_file['t0/channel0'][:]
    group = hdf5_file['t0']
    ch = []
    for ch_name in group:
        dset = group[ch_name]
        vox = dset.attrs.get('element_size_um')
        if vox is not None:
            vox = vox[::-1]  # reverse, because the attribute is stored as z, y, x
        channel = dset[:]
        ch.append(channel)
    # print("integral of the original image", np.sum(original))
# --------- creating the 2D Gaussian PSF ----------------------------------
points = np.max(ch[0])
point_sources = np.zeros(ch[0].shape)
center_x = ch[0].shape[0] // 2
center_y = ch[0].shape[1] // 2
point_sources[center_x, center_y] = points
# point_sources[90, 110] = points
psf = gaussian_filter(point_sources, sigma=2.4)
print("psf integral", np.sum(psf))
if not np.isclose(np.sum(psf), 1.0, atol=1e-6):
    psf /= np.sum(psf)  # normalize so that pixel values stay on the same scale
    print("psf integral after normalization", np.sum(psf))
# -------------- DECONVOLUTION ---------------------------------------------
with h5py.File(args.output, 'w') as hdf5_file:
    'Different noise levels ### signal intensity #################################################'
    intensity = args.intensity
    for w, channel in enumerate(ch):
        for i in intensity:
            scaled_original = channel / i if i != 0 else channel.copy()
            scaled_original = scaled_original.astype(np.float64)
            image = gaussian_filter(scaled_original, sigma=2.4)  # convolve(scaled_original, psf, mode='same')
            image[image <= 0] = 0
            "Apply Poisson noise to the image #################################################"
            if i == 0:
                noisy_image = image.copy()
            else:
                noisy_image = np.random.poisson(lam=image, size=image.shape).astype(np.float64)
            hdf5_file.create_dataset(f"ch{w:03d}/noise_level_{i:08.3f}", data=noisy_image)
            hdf5_file.create_dataset(f"ch{w:03d}/scaled_original_{i:08.3f}", data=scaled_original)
            '''Kernels with different sigmas - image restoration ##################'''
            g_sigma = args.kernel
            if vox is None:
                vox = np.array(args.vox)
            else:
                vox = np.array(vox)
            mses = []
            mssim = []
            for j in g_sigma:
                MSE = np.sum((noisy_image - scaled_original) ** 2) / noisy_image.size  # MSE between the noisy image and the scaled original
                image_kerneled = gaussian_filter(noisy_image, sigma=j)
                psf_kerneled = gaussian_filter(psf, sigma=j)
                print(np.sum(image_kerneled), np.sum(noisy_image), np.sum(psf_kerneled), np.sum(psf))
                'Deconvolve - restore the image ######################################'
                deconv = DWB(image=image_kerneled, voxel_size=vox, bead=scaled_original)
                dec = deconv.deconvolve_rl(n_iterations=args.iterations,
                                           bead=scaled_original,
                                           psf=psf_kerneled,
                                           im=image_kerneled)
                hdf5_file.create_dataset(f'ch{w:03d}/intensity_{i:08.3f}/SIGMA_{j:03.1f}', data=dec[0])
                hdf5_file.create_dataset(f"ch{w:03d}/intensity_{i:08.3f}/minimum_image_{j:03.1f}", data=dec[4])
                dec[1][-1] = MSE
                meansquare = np.concatenate(([dec[1][-1]], dec[1][:-1]))  # move the noisy-image MSE to the front of the per-iteration curve
                mses.append(meansquare)
                # mses.append(dec[1])
                mssim.append(dec[2])
                print("--------------- integral of the final image", np.sum(dec[0]))
            data = np.column_stack((np.column_stack(mses), np.column_stack(mssim)))
            np.save(f"{args.plot_data}_{args.iterations}_ch{w:03d}_{i:08.3f}.npy", np.column_stack((dec[3], data)))