"""Deconvolving 2D images."""
import argparse

import numpy as np
import h5py
from scipy.signal import convolve  # kept for the alternative blurring path noted below
from scipy.ndimage import gaussian_filter

from deconvolve_func import DeconvolveWithBead as DWB

parser = argparse.ArgumentParser()
parser.add_argument('bead', type=str, help="original file")
parser.add_argument('--output', type=str, required=True,
                    help="file name for the output images")
parser.add_argument('-pd', '--plot_data', type=str, required=True,
                    help="data storage file name, iterations added automatically")
parser.add_argument('-it', '--iterations', type=int, required=True,
                    help="number of iterations")
parser.add_argument('-in', '--intensity', type=float, nargs='+', required=True,
                    help="image intensity; the signal is divided by this value")
parser.add_argument('-k', '--kernel', type=float, nargs='+', required=True,
                    help="kernel g sigma values")
parser.add_argument('--vox', type=float, nargs='+', default=[1.0, 1.0, 1.0],
                    help="voxel size in micrometers, default [1.0, 1.0, 1.0]")
args = parser.parse_args()

# --------- import the image that will become the GROUND TRUTH -----------------
with h5py.File(args.bead, 'r') as hdf5_file:
    original = hdf5_file['t0/channel0'][:]
print("integral of the original image", np.sum(original))

# --------- create the 2D Gaussian PSF -----------------------------------------
points = np.max(original)
point_sources = np.zeros(original.shape)
center_x = original.shape[0] // 2
center_y = original.shape[1] // 2
point_sources[center_x, center_y] = points
# point_sources[90, 110] = points
psf = gaussian_filter(point_sources, sigma=2.4)
print("PSF integral", np.sum(psf))
if not np.isclose(np.sum(psf), 1.0, atol=1e-6):
    psf /= np.sum(psf)  # normalize so the pixel values stay in the same order of magnitude
print("PSF integral after normalization", np.sum(psf))

# -------------- DECONVOLUTION --------------------------------------------------
with h5py.File(args.output, 'w') as hdf5_file:
    # different noise levels / signal intensity ################################
    intensity = args.intensity
    for i in intensity:
        scaled_original = original / i if i != 0 else original.copy()
        scaled_original = scaled_original.astype(np.float64)
        image = gaussian_filter(scaled_original, sigma=2.4)  # alternative: convolve(scaled_original, psf, mode='same')
        image[image <= 0] = 0

        # apply Poisson noise to the image #####################################
        if i == 0:
            noisy_image = image.copy()
        else:
            noisy_image = np.random.poisson(lam=image, size=image.shape).astype(np.float64)
        hdf5_file.create_dataset(f"noise_level_{i:08.3f}", data=noisy_image)
        hdf5_file.create_dataset(f"scaled_original_{i:08.3f}", data=scaled_original)

        # kernels with different sigmas - image restoration ####################
        g_sigma = args.kernel
        vox = np.array(args.vox)
        mses = []
        mssim = []
        for j in g_sigma:
            image_kerneled = gaussian_filter(noisy_image, sigma=j)
            psf_kerneled = gaussian_filter(psf, sigma=j)
            print(np.sum(image_kerneled), np.sum(noisy_image), np.sum(psf_kerneled), np.sum(psf))

            # deconvolve - restore the image ###################################
            deconv = DWB(image=image_kerneled, voxel_size=vox, bead=scaled_original)
            dec = deconv.deconvolve_rl(n_iterations=args.iterations, bead=scaled_original,
                                       psf=psf_kerneled, im=image_kerneled)
            hdf5_file.create_dataset(f"intensity_{i:08.3f}/SIGMA_{j:03.1f}", data=dec[0])
            hdf5_file.create_dataset(f"intensity_{i:08.3f}/minimum_image_{j:03.1f}", data=dec[4])
            mses.append(dec[1])
            mssim.append(dec[2])
print("---------------lõpp-pildi integraal", np.sum(dec[0])) data = np.column_stack((np.column_stack(mses),np.column_stack(mssim))) np.save(f"{args.plot_data}_{args.iterations}_{i:08.3f}.npy", np.column_stack((dec[3], data)))