Commit b8c78948 authored by Matthieu Muller

Merge branch 'master' into 'master'

bach_antoine_im_analysis_project

See merge request mullemat/sicom_image_analysis_project!4
parents 25942cba e852a22d
"""A file containing a (pretty useless) reconstruction.
It serves as example of how the project works.
This file should NOT be modified.
"""
import numpy as np
from src.forward_model import CFA
import cv2
def MLRI_interpolation(op: CFA, y: np.ndarray) -> np.ndarray:
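    # op.adjoint(y) lifts the (H, W) mosaic back to an (H, W, 3) array in which
    # each channel keeps its observed CFA samples and is zero elsewhere; the
    # non-zero positions are used as sampling masks throughout this file.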
    z = op.adjoint(y)
    res = np.empty(op.input_shape)
    if op.cfa == 'bayer':
        # Bayer CFA: estimate green first, then red and blue guided by green.
        green_estimation = green_estimate(y, z)
        red_estimation = red_estimate(z, green_estimation)
        blue_estimation = blue_estimate(z, green_estimation)
        res[:, :, 0] = red_estimation
        res[:, :, 1] = green_estimation
        res[:, :, 2] = blue_estimation
    else:
        # Quad Bayer CFA: average each 2x2 block to obtain a Bayer-like mosaic,
        # demosaick it, upsample the result and add back the per-pixel difference
        # between the original mosaic and its block-averaged version.
        new_z = cv2.resize(z, (z.shape[1] // 2, z.shape[0] // 2), interpolation=cv2.INTER_AREA)
        new_y = np.sum(new_z, axis=2)
        new_z_upsampled = np.repeat(np.repeat(new_z, 2, axis=0), 2, axis=1)
        new_y_upsampled = np.sum(new_z_upsampled, axis=2)
        mask_out = cv2.merge([y - new_y_upsampled] * 3)
        res_downsampled = np.zeros_like(new_z)
        green_estimation = green_estimate(new_y, new_z)
        red_estimation = red_estimate(new_z, green_estimation)
        blue_estimation = blue_estimate(new_z, green_estimation)
        res_downsampled[:, :, 0] = red_estimation
        res_downsampled[:, :, 1] = green_estimation
        res_downsampled[:, :, 2] = blue_estimation
        res_downsampled_upsampled = np.repeat(np.repeat(res_downsampled, 2, axis=0), 2, axis=1)
        res = res_downsampled_upsampled + mask_out
    return res
def green_estimate(y: np.ndarray, z: np.ndarray) -> np.ndarray:
    # Green estimation by gradient-based threshold-free color filter array
    # interpolation (GBTF).
    (grh_est, grv_est,
     gbh_est, gbv_est,
     rgh_est, rgv_est,
     bgh_est, bgv_est) = hamilton_and_adam_interpolation(y)
    HCDE, VCDE = color_difference(grh_est, grv_est,
                                  gbh_est, gbv_est,
                                  rgh_est, rgv_est,
                                  bgh_est, bgv_est, z)
    gradient_H, gradient_V = gradient_compute(HCDE, VCDE, y)
    Weight_W, Weight_E, Weight_S, Weight_N = directionnal_weight(gradient_H, gradient_V)
    green_estimation = final_estimation(Weight_W, Weight_E, Weight_S, Weight_N,
                                        HCDE, VCDE, y, z)
    return green_estimation
def red_estimate(z: np.ndarray, green_estimation: np.ndarray) -> np.ndarray:
    # Red estimation by minimized-Laplacian residual interpolation (MLRI).
    return red_blue_estimate(z, green_estimation, 0)
def blue_estimate(z: np.ndarray, green_estimation: np.ndarray) -> np.ndarray:
    # Blue estimation by minimized-Laplacian residual interpolation (MLRI).
    return red_blue_estimate(z, green_estimation, 2)
def red_blue_estimate(z: np.ndarray, green_estimation: np.ndarray, blue_red: int) -> np.ndarray:
    # Red or blue estimation by minimized-Laplacian residual interpolation;
    # blue_red is the channel index (0 for red, 2 for blue).
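    # Overview of the MLRI step below: (1) Laplacian-filter the sparse R/B channel
    # and the green estimate masked to the same positions, (2) fit the locally
    # affine model R/B ~= a * green + b with a guided filter driven by these
    # Laplacians, (3) bilinearly interpolate the residuals at the sampled
    # positions and add them back to the tentative estimate.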
    # Laplacian filtering of the sparse R/B channel and of the masked green estimate.
    F = np.array([
        [0, 0, -1, 0, 0],
        [0, 0, 0, 0, 0],
        [-1, 0, 4, 0, -1],
        [0, 0, 0, 0, 0],
        [0, 0, -1, 0, 0]
    ], dtype=np.float32)
    lap_rb = cv2.filter2D(z[:, :, blue_red], -1, F, borderType=cv2.BORDER_REPLICATE)
    lap_green = cv2.filter2D(green_estimation * (z[:, :, blue_red] != 0), -1, F, borderType=cv2.BORDER_REPLICATE)
    # Estimate the residuals: guided filter with the green estimate as guide.
    mean_a, mean_b = guided_filter(green_estimation * (z[:, :, blue_red] != 0), z[:, :, blue_red],
                                   lap_green * (z[:, :, blue_red] != 0), lap_rb * (z[:, :, blue_red] != 0))
    tentativeRB = mean_a * green_estimation + mean_b
    residualRB = (z[:, :, blue_red] != 0) * (z[:, :, blue_red] - tentativeRB)
    # Bilinear interpolation of the residuals.
    H = np.array([
        [1/4, 1/2, 1/4],
        [1/2, 1, 1/2],
        [1/4, 1/2, 1/4]
    ], dtype=np.float32)
    residualRB2 = cv2.filter2D(residualRB, -1, H, borderType=cv2.BORDER_REPLICATE)
    rb_estimation = residualRB2 + tentativeRB
    return rb_estimation
def hamilton_and_adam_interpolation(y: np.ndarray) -> (np.ndarray, np.ndarray,
                                                       np.ndarray, np.ndarray,
                                                       np.ndarray, np.ndarray,
                                                       np.ndarray, np.ndarray):
    # Hamilton-Adams interpolation of the missing samples, applied at every pixel
    # in both the horizontal and vertical directions.
    grh_est = np.zeros_like(y)
    grv_est = np.zeros_like(y)
    gbh_est = np.zeros_like(y)
    gbv_est = np.zeros_like(y)
    rgh_est = np.zeros_like(y)
    rgv_est = np.zeros_like(y)
    bgh_est = np.zeros_like(y)
    bgv_est = np.zeros_like(y)
    rows, cols = y.shape
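    # The Bayer pattern is assumed to be G R / B G: red samples at (even row, odd
    # column), blue at (odd row, even column), green elsewhere. At a red pixel,
    # for instance, the horizontal green estimate below is the average of the two
    # horizontal green neighbours plus a second-order correction computed from the
    # co-located red samples: (y[i, j-1] + y[i, j+1]) / 2 + (2*y[i, j] - y[i, j-2] - y[i, j+2]) / 4.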
    for i in range(rows):
        for j in range(cols):
            if (i % 2 == 0 and j % 2 != 0):
                # estimate horizontal green with red
                if (j == 1):
                    grh_est[i, j] = 0
                elif (j == cols - 2) or (j == cols - 1):
                    grh_est[i, j] = 2 * y[i, j - 2] - y[i, j - 4]
                else:
                    grh_est[i, j] = (y[i, j - 1] + y[i, j + 1]) / 2 + (2 * y[i, j] - y[i, j - 2] - y[i, j + 2]) / 4
                # estimate vertical green with red
                if (i == 0):
                    grv_est[i, j] = 0
                elif (i == rows - 2) or (i == rows - 1):
                    grv_est[i, j] = 2 * y[i - 2, j] - y[i - 4, j]
                else:
                    grv_est[i, j] = (y[i - 1, j] + y[i + 1, j]) / 2 + (2 * y[i, j] - y[i - 2, j] - y[i + 2, j]) / 4
            if (i % 2 != 0 and j % 2 == 0):
                # estimate horizontal green with blue
                if (j == 0):
                    gbh_est[i, j] = 0
                elif (j == cols - 2) or (j == cols - 1):
                    gbh_est[i, j] = 2 * y[i, j - 2] - y[i, j - 4]
                else:
                    gbh_est[i, j] = (y[i, j - 1] + y[i, j + 1]) / 2 + (2 * y[i, j] - y[i, j - 2] - y[i, j + 2]) / 4
                # estimate vertical green with blue
                if (i == 1):
                    gbv_est[i, j] = 0
                elif (i == rows - 2) or (i == rows - 1):
                    gbv_est[i, j] = 2 * y[i - 2, j] - y[i - 4, j]
                else:
                    gbv_est[i, j] = (y[i - 1, j] + y[i + 1, j]) / 2 + (2 * y[i, j] - y[i - 2, j] - y[i + 2, j]) / 4
            if (i % 2 == 0 and j % 2 == 0):
                # estimate horizontal red with green
                if (j == 0):
                    rgh_est[i, j] = 0
                elif (j == cols - 2) or (j == cols - 1):
                    rgh_est[i, j] = 2 * y[i, j - 2] - y[i, j - 4]
                else:
                    rgh_est[i, j] = (y[i, j - 1] + y[i, j + 1]) / 2 + (2 * y[i, j] - y[i, j - 2] - y[i, j + 2]) / 4
                # estimate vertical blue with green
                if (i == 0):
                    bgv_est[i, j] = 0
                elif (i == rows - 2) or (i == rows - 1):
                    bgv_est[i, j] = 2 * y[i - 2, j] - y[i - 4, j]
                else:
                    bgv_est[i, j] = (y[i - 1, j] + y[i + 1, j]) / 2 + (2 * y[i, j] - y[i - 2, j] - y[i + 2, j]) / 4
            if (i % 2 != 0 and j % 2 != 0):
                # estimate vertical red with green
                if (i == 1):
                    rgv_est[i, j] = 0
                elif (i == rows - 2) or (i == rows - 1):
                    rgv_est[i, j] = 2 * y[i - 2, j] - y[i - 4, j]
                else:
                    rgv_est[i, j] = (y[i - 1, j] + y[i + 1, j]) / 2 + (2 * y[i, j] - y[i - 2, j] - y[i + 2, j]) / 4
                # estimate horizontal blue with green
                if (j == 1):
                    bgh_est[i, j] = 0
                elif (j == cols - 2) or (j == cols - 1):
                    bgh_est[i, j] = 2 * y[i, j - 2] - y[i, j - 4]
                else:
                    bgh_est[i, j] = (y[i, j - 1] + y[i, j + 1]) / 2 + (2 * y[i, j] - y[i, j - 2] - y[i, j + 2]) / 4
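    # The border row/column of each estimate, left at zero in the loop above, is
    # now filled by linear extrapolation from the first two interior estimates.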
    grh_est[:, 1] = 2 * grh_est[:, 3] - grh_est[:, 5]
    grv_est[0, :] = 2 * grv_est[2, :] - grv_est[4, :]
    gbh_est[:, 0] = 2 * gbh_est[:, 2] - gbh_est[:, 4]
    gbv_est[1, :] = 2 * gbv_est[3, :] - gbv_est[5, :]
    rgh_est[:, 0] = 2 * rgh_est[:, 2] - rgh_est[:, 4]
    rgv_est[1, :] = 2 * rgv_est[3, :] - rgv_est[5, :]
    bgh_est[:, 1] = 2 * bgh_est[:, 3] - bgh_est[:, 5]
    bgv_est[0, :] = 2 * bgv_est[2, :] - bgv_est[4, :]
    return grh_est, grv_est, gbh_est, gbv_est, rgh_est, rgv_est, bgh_est, bgv_est
def color_difference(grh_est: np.ndarray, grv_est: np.ndarray,
                     gbh_est: np.ndarray, gbv_est: np.ndarray,
                     rgh_est: np.ndarray, rgv_est: np.ndarray,
                     bgh_est: np.ndarray, bgv_est: np.ndarray,
                     z: np.ndarray) -> (np.ndarray, np.ndarray):
    # Estimate the color difference arrays in the horizontal and vertical directions.
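    # At each pixel only one channel of z is non-zero, so z[:, :, 1] - z[:, :, 0] - z[:, :, 2]
    # holds +G at green locations and -R or -B elsewhere; adding the directional
    # Hamilton-Adams estimates turns it into directional green-minus-red /
    # green-minus-blue difference maps.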
    HCDE = z[:, :, 1] - z[:, :, 0] - z[:, :, 2]
    VCDE = HCDE.copy()
    difference_rg_h = grh_est - rgh_est
    difference_rg_v = grv_est - rgv_est
    difference_bg_h = gbh_est - bgh_est
    difference_bg_v = gbv_est - bgv_est
    HCDE = HCDE + difference_rg_h + difference_bg_h
    VCDE = VCDE + difference_rg_v + difference_bg_v
    return HCDE, VCDE
def gradient_compute(HCDE: np.ndarray, VCDE: np.ndarray, y: np.ndarray) -> (np.ndarray, np.ndarray):
    # Estimate the color difference gradient arrays in the horizontal and vertical directions.
    new_gradient_vcde = cv2.copyMakeBorder(VCDE, 1, 1, 1, 1, cv2.BORDER_CONSTANT, None, 0)
    new_gradient_hcde = cv2.copyMakeBorder(HCDE, 1, 1, 1, 1, cv2.BORDER_CONSTANT, None, 0)
    gradient_V = np.zeros_like(y)
    gradient_H = np.zeros_like(y)
    rows, cols = y.shape
    for i in range(rows):
        for j in range(cols):
            # Central differences on the zero-padded arrays, i.e. |VCDE[i-1, j] - VCDE[i+1, j]|
            # and |HCDE[i, j-1] - HCDE[i, j+1]| in the original coordinates.
            gradient_V[i, j] = np.abs(new_gradient_vcde[i, j + 1] - new_gradient_vcde[i + 2, j + 1])
            gradient_H[i, j] = np.abs(new_gradient_hcde[i + 1, j] - new_gradient_hcde[i + 1, j + 2])
    return gradient_H, gradient_V
def directionnal_weight(gradient_H: np.ndarray, gradient_V: np.ndarray) -> (np.ndarray, np.ndarray,
                                                                            np.ndarray, np.ndarray):
    # Compute the weight of each direction as the reciprocal of the squared sum of the
    # gradients in that direction over a local 3x3 window.
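    # The 3x3 box filter aggregates the gradients, and the padded/cropped copies
    # below shift that aggregate by one pixel towards the west, east, south and
    # north, so that each weight describes the window on that side of the pixel.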
    Kernel_Weight = np.ones((3, 3), dtype=np.float32)
    Weight_H = cv2.filter2D(gradient_H, -1, Kernel_Weight, borderType=cv2.BORDER_REPLICATE)
    Weight_V = cv2.filter2D(gradient_V, -1, Kernel_Weight, borderType=cv2.BORDER_REPLICATE)
    Weight_W = cv2.copyMakeBorder(Weight_H, 0, 0, 1, 0, cv2.BORDER_REPLICATE, None, 0)[:, :-1]
    Weight_E = cv2.copyMakeBorder(Weight_H, 0, 0, 0, 1, cv2.BORDER_REPLICATE, None, 0)[:, 1:]
    Weight_S = cv2.copyMakeBorder(Weight_V, 0, 1, 0, 0, cv2.BORDER_REPLICATE, None, 0)[1:, :]
    Weight_N = cv2.copyMakeBorder(Weight_V, 1, 0, 0, 0, cv2.BORDER_REPLICATE, None, 0)[:-1, :]
    Weight_W = 1.0 / (np.power(Weight_W, 2))
    Weight_E = 1.0 / (np.power(Weight_E, 2))
    Weight_S = 1.0 / (np.power(Weight_S, 2))
    Weight_N = 1.0 / (np.power(Weight_N, 2))
    return Weight_W, Weight_E, Weight_S, Weight_N
def final_estimation(Weight_W: np.ndarray, Weight_E: np.ndarray,
                     Weight_S: np.ndarray, Weight_N: np.ndarray,
                     HCDE: np.ndarray, VCDE: np.ndarray,
                     y: np.ndarray, z: np.ndarray,
                     size=9, sigma=1) -> np.ndarray:
    # Combine the directional color differences and the directional weights to estimate green.
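    # Ke and Kw are one-sided Gaussian kernels (right and left halves of a 1x9
    # Gaussian, renormalised); their transposes Ks and Kn act vertically. Each one
    # smooths the color differences coming from one of the four directions.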
    h = cv2.getGaussianKernel(size, sigma)
    Ke = np.array([0, 0, 0, 0, 1, 1, 1, 1, 1], dtype=np.float32) * h.T
    Kw = np.array([1, 1, 1, 1, 1, 0, 0, 0, 0], dtype=np.float32) * h.T
    Ke /= np.sum(Ke, axis=1)
    Kw /= np.sum(Kw, axis=1)
    Ks = np.transpose(Ke)
    Kn = np.transpose(Kw)
    difn = cv2.filter2D(VCDE, -1, Kn, borderType=cv2.BORDER_REPLICATE)
    difs = cv2.filter2D(VCDE, -1, Ks, borderType=cv2.BORDER_REPLICATE)
    difw = cv2.filter2D(HCDE, -1, Kw, borderType=cv2.BORDER_REPLICATE)
    dife = cv2.filter2D(HCDE, -1, Ke, borderType=cv2.BORDER_REPLICATE)
    # Weighted combination of the four directional color difference estimates.
    Wt = Weight_W + Weight_E + Weight_N + Weight_S
    combined_difference = (Weight_N * difn + Weight_S * difs + Weight_W * difw + Weight_E * dife) / Wt
    green = combined_difference + y
    # Keep the observed green samples and use the estimate everywhere else.
    green_estimation = green * (z[:, :, 1] == 0) + z[:, :, 1]
    return green_estimation
def guided_filter(guide: np.ndarray, to_interpolate: np.ndarray,
                  guide_laplacian: np.ndarray, to_interpolate_laplacian: np.ndarray,
                  r=11, eps=0) -> (np.ndarray, np.ndarray):
    # Interpolate the red or blue values with a guided filter built from the green estimation.
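    # The affine coefficients a and b of the guided filter are estimated in each
    # r x r window from the masked Laplacians and samples, so the window statistics
    # are normalised by the number of sampled pixels (mask_normalize), while the
    # final averaging of a and b uses every pixel (mask_normalize_ab).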
    kernel = np.ones((r, r), dtype=np.float32)
    mask_normalize = cv2.filter2D(np.ones_like(guide) * (to_interpolate != 0), -1, kernel, borderType=cv2.BORDER_CONSTANT)
    mask_normalize_ab = cv2.filter2D(np.ones_like(guide), -1, kernel, borderType=cv2.BORDER_CONSTANT)
    mean_p = cv2.filter2D(to_interpolate_laplacian, -1, kernel, borderType=cv2.BORDER_CONSTANT) / mask_normalize
    mean_I = cv2.filter2D(guide_laplacian, -1, kernel, borderType=cv2.BORDER_CONSTANT) / mask_normalize
    corr_I = cv2.filter2D(guide_laplacian * guide_laplacian, -1, kernel, borderType=cv2.BORDER_CONSTANT) / mask_normalize
    corr_Ip = cv2.filter2D(guide_laplacian * to_interpolate_laplacian, -1, kernel, borderType=cv2.BORDER_CONSTANT) / mask_normalize
    var_I = corr_I - mean_I * mean_I
    cov_Ip = corr_Ip - mean_I * mean_p
    a = cov_Ip / (var_I + eps)
    mean_guide = cv2.filter2D(guide, -1, kernel, borderType=cv2.BORDER_CONSTANT) / mask_normalize
    mean_to_interpolate = cv2.filter2D(to_interpolate, -1, kernel, borderType=cv2.BORDER_CONSTANT) / mask_normalize
    b = mean_to_interpolate - a * mean_guide
    mean_a = cv2.filter2D(a, -1, kernel, borderType=cv2.BORDER_CONSTANT) / mask_normalize_ab
    mean_b = cv2.filter2D(b, -1, kernel, borderType=cv2.BORDER_CONSTANT) / mask_normalize_ab
    return mean_a, mean_b
####
####
####
#### #### #### #############
#### ###### #### ##################
#### ######## #### ####################
#### ########## #### #### ########
#### ############ #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ## ###### #### #### ######
#### #### #### ## #### #### ############
#### #### ###### #### #### ##########
#### #### ########## #### #### ########
#### #### ######## #### ####
#### #### ############ ####
#### #### ########## ####
#### #### ######## ####
#### #### ###### ####
# 28/01/2024
# Authors: BACH Antoine
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.bach_antoine.MLRI_reconstruction import MLRI_interpolation
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
# Performing the reconstruction.
# TODO
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
res = MLRI_interpolation(op, y)
return res
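# Minimal usage sketch (illustrative only, not part of the project files): the
# file name 'mosaicked_image.npy' is a hypothetical placeholder for a (H, W)
# mosaic saved with NumPy.
#
#     y = np.load('mosaicked_image.npy')
#     res = run_reconstruction(y, 'bayer')   # (H, W, 3) demosaicked image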
####
####
####
#### #### #### #############
#### ###### #### ##################
#### ######## #### ####################
#### ########## #### #### ########
#### ############ #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ## ###### #### #### ######
#### #### #### ## #### #### ############
#### #### ###### #### #### ##########
#### #### ########## #### #### ########
#### #### ######## #### ####
#### #### ############ ####
#### #### ########## ####
#### #### ######## ####
#### #### ###### ####
# 28/01/2024
# Authors: BACH Antoine