Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing 1649 additions and 1 deletion
"""A file containing a (pretty useless) reconstruction.
It serves as an example of how the project works.
This file should NOT be modified.
"""
import numpy as np
from scipy.signal import convolve2d
from src.forward_model import CFA
# High quality linear interpolation filters for bayer mosaic
bayer_g_at_r = np.array([[ 0,  0, -1,  0,  0],
                         [ 0,  0,  2,  0,  0],
                         [-1,  2,  4,  2, -1],
                         [ 0,  0,  2,  0,  0],
                         [ 0,  0, -1,  0,  0]]) / 8

bayer_g_at_b = bayer_g_at_r

bayer_r_at_green_rrow_bcol = np.array([[ 0,  0, 1/2,  0,  0],
                                       [ 0, -1,   0, -1,  0],
                                       [-1,  4,   5,  4, -1],
                                       [ 0, -1,   0, -1,  0],
                                       [ 0,  0, 1/2,  0,  0]]) / 8

bayer_r_at_green_brow_rcol = bayer_r_at_green_rrow_bcol.T
bayer_b_at_green_rrow_bcol = bayer_r_at_green_brow_rcol
bayer_b_at_green_brow_rcol = bayer_r_at_green_rrow_bcol

bayer_r_at_b = np.array([[   0, 0, -3/2, 0,    0],
                         [   0, 2,    0, 2,    0],
                         [-3/2, 0,    6, 0, -3/2],
                         [   0, 2,    0, 2,    0],
                         [   0, 0, -3/2, 0,    0]]) / 8
bayer_b_at_r = bayer_r_at_b
# High quality linear interpolation filters for quad mosaic
quad_g_at_r = np.array([[ 0,  0,  0,  0, -1, -1,  0,  0,  0,  0],
                        [ 0,  0,  0,  0, -1, -1,  0,  0,  0,  0],
                        [ 0,  0,  0,  0,  2,  2,  0,  0,  0,  0],
                        [ 0,  0,  0,  0,  2,  2,  0,  0,  0,  0],
                        [-1, -1,  2,  2,  4,  4,  2,  2, -1, -1],
                        [-1, -1,  2,  2,  4,  4,  2,  2, -1, -1],
                        [ 0,  0,  0,  0,  2,  2,  0,  0,  0,  0],
                        [ 0,  0,  0,  0,  2,  2,  0,  0,  0,  0],
                        [ 0,  0,  0,  0, -1, -1,  0,  0,  0,  0],
                        [ 0,  0,  0,  0, -1, -1,  0,  0,  0,  0]]) / 32

quad_g_at_b = quad_g_at_r

quad_r_at_green_rrow_bcol = np.array([[ 0,  0,  0,  0, 1/2, 1/2,  0,  0,  0,  0],
                                      [ 0,  0,  0,  0, 1/2, 1/2,  0,  0,  0,  0],
                                      [ 0,  0, -1, -1,   0,   0, -1, -1,  0,  0],
                                      [ 0,  0, -1, -1,   0,   0, -1, -1,  0,  0],
                                      [-1, -1,  4,  4,   5,   5,  4,  4, -1, -1],
                                      [-1, -1,  4,  4,   5,   5,  4,  4, -1, -1],
                                      [ 0,  0, -1, -1,   0,   0, -1, -1,  0,  0],
                                      [ 0,  0, -1, -1,   0,   0, -1, -1,  0,  0],
                                      [ 0,  0,  0,  0, 1/2, 1/2,  0,  0,  0,  0],
                                      [ 0,  0,  0,  0, 1/2, 1/2,  0,  0,  0,  0]]) / 32

quad_r_at_green_brow_rcol = quad_r_at_green_rrow_bcol.T
quad_b_at_green_rrow_bcol = quad_r_at_green_brow_rcol
quad_b_at_green_brow_rcol = quad_r_at_green_rrow_bcol

quad_r_at_b = np.array([[   0,    0, 0, 0, -3/2, -3/2, 0, 0,    0,    0],
                        [   0,    0, 0, 0, -3/2, -3/2, 0, 0,    0,    0],
                        [   0,    0, 2, 2,    0,    0, 2, 2,    0,    0],
                        [   0,    0, 2, 2,    0,    0, 2, 2,    0,    0],
                        [-3/2, -3/2, 0, 0,    6,    6, 0, 0, -3/2, -3/2],
                        [-3/2, -3/2, 0, 0,    6,    6, 0, 0, -3/2, -3/2],
                        [   0,    0, 2, 2,    0,    0, 2, 2,    0,    0],
                        [   0,    0, 2, 2,    0,    0, 2, 2,    0,    0],
                        [   0,    0, 0, 0, -3/2, -3/2, 0, 0,    0,    0],
                        [   0,    0, 0, 0, -3/2, -3/2, 0, 0,    0,    0]]) / 32
quad_b_at_r = quad_r_at_b
def high_quality_linear_interpolation(op: CFA, y: np.ndarray) -> np.ndarray:
    """Demosaicks y with high-quality linear interpolation (Malvar-style kernels).

    Args:
        op (CFA): CFA operator (bayer or quad_bayer).
        y (np.ndarray): Mosaicked image.

    Returns:
        np.ndarray: Demosaicked image.
    """
    z = op.adjoint(y)
    res = np.empty(op.input_shape)
    mask = op.mask
    R, G, B = 0, 1, 2

    if op.cfa == 'bayer':
        y_pad = np.pad(y, 2, 'constant', constant_values=0)
        for i in range(y.shape[0]):
            for j in range(y.shape[1]):
                i_pad, j_pad = i + 2, j + 2
                if mask[i, j, G] == 1:  # We must estimate the R and B components
                    res[i, j, G] = y[i, j]
                    # Estimate R (R at left/right: green in a red row; otherwise R above/below)
                    if mask[i, max(0, j-1), R] == 1 or mask[i, min(y.shape[1]-1, j+1), R] == 1:
                        res[i, j, R] = float(convolve2d(y_pad[i_pad-2:i_pad+3, j_pad-2:j_pad+3], bayer_r_at_green_rrow_bcol, mode='valid'))
                    else:
                        res[i, j, R] = float(convolve2d(y_pad[i_pad-2:i_pad+3, j_pad-2:j_pad+3], bayer_r_at_green_brow_rcol, mode='valid'))
                    # Estimate B (B at left/right: green in a blue row; otherwise B above/below)
                    if mask[i, max(0, j-1), B] == 1 or mask[i, min(y.shape[1]-1, j+1), B] == 1:
                        res[i, j, B] = float(convolve2d(y_pad[i_pad-2:i_pad+3, j_pad-2:j_pad+3], bayer_b_at_green_brow_rcol, mode='valid'))
                    else:
                        res[i, j, B] = float(convolve2d(y_pad[i_pad-2:i_pad+3, j_pad-2:j_pad+3], bayer_b_at_green_rrow_bcol, mode='valid'))
                elif mask[i, j, R] == 1:  # We must estimate the G and B components
                    res[i, j, R] = y[i, j]
                    res[i, j, G] = float(convolve2d(y_pad[i_pad-2:i_pad+3, j_pad-2:j_pad+3], bayer_g_at_r, mode='valid'))
                    res[i, j, B] = float(convolve2d(y_pad[i_pad-2:i_pad+3, j_pad-2:j_pad+3], bayer_b_at_r, mode='valid'))
                else:  # We must estimate the R and G components
                    res[i, j, B] = y[i, j]
                    res[i, j, G] = float(convolve2d(y_pad[i_pad-2:i_pad+3, j_pad-2:j_pad+3], bayer_g_at_b, mode='valid'))
                    res[i, j, R] = float(convolve2d(y_pad[i_pad-2:i_pad+3, j_pad-2:j_pad+3], bayer_r_at_b, mode='valid'))
    else:
        y_pad = np.pad(y, 4, 'constant', constant_values=0)
        for i in range(0, y.shape[0], 2):
            for j in range(0, y.shape[1], 2):
                i_pad, j_pad = i + 4, j + 4
                if mask[i, j, G] == 1:  # We must estimate the R and B components
                    res[i:i+2, j:j+2, G] = y[i:i+2, j:j+2]
                    # Estimate R (R at left/right: green in a red row; otherwise R above/below)
                    if mask[i, max(0, j-1), R] == 1 or mask[i, min(y.shape[1]-1, j+1), R] == 1:
                        res[i:i+2, j:j+2, R] = float(convolve2d(y_pad[i_pad-4:i_pad+6, j_pad-4:j_pad+6], quad_r_at_green_rrow_bcol, mode='valid'))
                    else:
                        res[i:i+2, j:j+2, R] = float(convolve2d(y_pad[i_pad-4:i_pad+6, j_pad-4:j_pad+6], quad_r_at_green_brow_rcol, mode='valid'))
                    # Estimate B (B at left/right: green in a blue row; otherwise B above/below)
                    if mask[i, max(0, j-1), B] == 1 or mask[i, min(y.shape[1]-1, j+1), B] == 1:
                        res[i:i+2, j:j+2, B] = float(convolve2d(y_pad[i_pad-4:i_pad+6, j_pad-4:j_pad+6], quad_b_at_green_brow_rcol, mode='valid'))
                    else:
                        res[i:i+2, j:j+2, B] = float(convolve2d(y_pad[i_pad-4:i_pad+6, j_pad-4:j_pad+6], quad_b_at_green_rrow_bcol, mode='valid'))
                elif mask[i, j, R] == 1:  # We must estimate the G and B components
                    res[i:i+2, j:j+2, R] = y[i:i+2, j:j+2]
                    res[i:i+2, j:j+2, G] = float(convolve2d(y_pad[i_pad-4:i_pad+6, j_pad-4:j_pad+6], quad_g_at_r, mode='valid'))
                    res[i:i+2, j:j+2, B] = float(convolve2d(y_pad[i_pad-4:i_pad+6, j_pad-4:j_pad+6], quad_b_at_r, mode='valid'))
                else:  # We must estimate the R and G components
                    res[i:i+2, j:j+2, B] = y[i:i+2, j:j+2]
                    res[i:i+2, j:j+2, G] = float(convolve2d(y_pad[i_pad-4:i_pad+6, j_pad-4:j_pad+6], quad_g_at_b, mode='valid'))
                    res[i:i+2, j:j+2, R] = float(convolve2d(y_pad[i_pad-4:i_pad+6, j_pad-4:j_pad+6], quad_r_at_b, mode='valid'))

    return res
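
# Minimal usage sketch: a random 2D array stands in for a real mosaicked
# measurement, and the CFA operator is built exactly as run_reconstruction does.
# Only the call signature and output shape are illustrated.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    demo_y = rng.random((64, 64))                 # stand-in mosaicked image
    demo_op = CFA("bayer", (64, 64, 3))           # CFA operator, as in run_reconstruction
    demo_rgb = high_quality_linear_interpolation(demo_op, demo_y)
    print(demo_rgb.shape)                         # expected: (64, 64, 3)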
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.lioretn.demosaicking import high_quality_linear_interpolation
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    # Performing the reconstruction.
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    res = high_quality_linear_interpolation(op, y)
    return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
import numpy as np

# Pixel-type helpers for a Bayer layout with green at (even, even) and (odd, odd),
# red at (even, odd) and blue at (odd, even).

def is_green(i, j):
    return (not i % 2 and not j % 2) or (i % 2 and j % 2)

def is_red(i, j):
    return not i % 2 and j % 2

def is_blue(i, j):
    return i % 2 and not j % 2
g_ker = np.array([
    [0, 0, -1, 0, 0],
    [0, 0, 2, 0, 0],
    [-1, 2, 4, 2, -1],
    [0, 0, 2, 0, 0],
    [0, 0, -1, 0, 0]
]) / 8

rgrrow_ker = np.array([
    [0, 0, 0.5, 0, 0],
    [0, -1, 0, -1, 0],
    [-1, 4, 5, 4, -1],
    [0, -1, 0, -1, 0],
    [0, 0, 0.5, 0, 0]
]) / 8

rgrcol_ker = np.array([
    [0, 0, -1, 0, 0],
    [0, -1, 4, -1, 0],
    [0.5, 0, 5, 0, 0.5],
    [0, -1, 4, -1, 0],
    [0, 0, -1, 0, 0]
]) / 8

rb_ker = np.array([
    [0, 0, -1.5, 0, 0],
    [0, 2, 0, 2, 0],
    [-1.5, 0, 6, 0, -1.5],
    [0, 2, 0, 2, 0],
    [0, 0, -1.5, 0, 0]
]) / 8
def high_quality_interpolation(image):
    """High-quality (Malvar-style) linear interpolation of a Bayer mosaicked image."""
    res = np.empty((image.shape[0], image.shape[1], 3))
    estimated_green = np.zeros_like(image)
    estimated_red = np.zeros_like(image)
    estimated_blue = np.zeros_like(image)
    for i in range(2, image.shape[0] - 2):
        for j in range(2, image.shape[1] - 2):
            if is_red(i, j):
                estimated_red[i, j] = image[i, j]
                estimated_green[i, j] = (image[i-2:i+3, j-2:j+3] * g_ker).sum()
                estimated_blue[i, j] = (image[i-2:i+3, j-2:j+3] * rb_ker).sum()
            elif is_blue(i, j):
                estimated_red[i, j] = (image[i-2:i+3, j-2:j+3] * rb_ker).sum()
                estimated_green[i, j] = (image[i-2:i+3, j-2:j+3] * g_ker).sum()
                estimated_blue[i, j] = image[i, j]
            else:
                estimated_green[i, j] = image[i, j]
                if is_red(i-1, j):
                    estimated_red[i, j] = (image[i-2:i+3, j-2:j+3] * rgrcol_ker).sum()
                    estimated_blue[i, j] = (image[i-2:i+3, j-2:j+3] * rgrrow_ker).sum()
                else:
                    estimated_red[i, j] = (image[i-2:i+3, j-2:j+3] * rgrrow_ker).sum()
                    estimated_blue[i, j] = (image[i-2:i+3, j-2:j+3] * rgrcol_ker).sum()
    res[:, :, 0] = estimated_red.clip(0, 1)
    res[:, :, 1] = estimated_green.clip(0, 1)
    res[:, :, 2] = estimated_blue.clip(0, 1)
    return res
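
# Minimal usage sketch: a random array stands in for a Bayer mosaicked image in
# [0, 1]; it only demonstrates the call signature and the output shape.
if __name__ == "__main__":
    demo_mosaic = np.random.default_rng(0).random((32, 32))
    demo_rgb = high_quality_interpolation(demo_mosaic)
    print(demo_rgb.shape)  # expected: (32, 32, 3)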
import numpy as np

def is_green(i, j):
    return (not i % 2 and not j % 2) or (i % 2 and j % 2)

def is_red(i, j):
    return not i % 2 and j % 2

def is_blue(i, j):
    return i % 2 and not j % 2
def MDWI(image):
    """MDWI demosaicking of a Bayer mosaicked image (directional pre-estimations combined with gradient-based weights)."""
    res = np.empty((image.shape[0], image.shape[1], 3))
    estimated_green = np.zeros_like(image)
    estimated_red = np.zeros_like(image)
    estimated_blue = np.zeros_like(image)
    EPSILON = 10e-4

    # Estimation of the green plane first
    pre_estimation = {}
    diag_gradient_factor = {}
    weight = {}
    H = [-8/256, 23/256, -48/256, 161/256, 161/256, -48/256, 23/256, -8/256]
    coord_NE = [(-4, -3), (-3, -2), (-2, -1), (-1, 0), (0, 1), (1, 2), (2, 3), (3, 4)]
    coord_SE = [(-3, -4), (-2, -3), (-1, -2), (0, -1), (1, 0), (2, 1), (3, 2), (4, 3)]
    coord_NW = [(3, -4), (2, -3), (1, -2), (0, -1), (-1, 0), (-2, 1), (-3, 2), (-4, 3)]
    coord_SW = [(4, -3), (3, -2), (2, -1), (1, 0), (0, 1), (-1, 2), (-2, 3), (-3, 4)]
    for i in range(3, image.shape[0] - 4):
        for j in range(3, image.shape[1] - 4):
            if not is_green(i, j):
                pre_estimation["N"] = image[i-1, j] + (image[i, j] - image[i-2, j]) / 2
                pre_estimation["S"] = image[i+1, j] + (image[i, j] - image[i+2, j]) / 2
                pre_estimation["W"] = image[i, j-1] + (image[i, j] - image[i, j-2]) / 2
                pre_estimation["E"] = image[i, j+1] + (image[i, j] - image[i, j+2]) / 2
                diag_gradient_factor["N"] = abs(image[i-2, j-1] - image[i, j-1]) + abs(image[i-3, j] - image[i-1, j]) + abs(image[i-2, j+1] - image[i, j+1]) + abs(image[i-3, j-1] - image[i-1, j-1]) + abs(image[i-3, j+1] - image[i-1, j+1]) + abs(image[i-2, j] - image[i, j]) + EPSILON
                diag_gradient_factor["S"] = abs(image[i+2, j-1] - image[i, j-1]) + abs(image[i+3, j] - image[i+1, j]) + abs(image[i+2, j+1] - image[i, j+1]) + abs(image[i+3, j-1] - image[i+1, j-1]) + abs(image[i+3, j+1] - image[i+1, j+1]) + abs(image[i+2, j] - image[i, j]) + EPSILON
                diag_gradient_factor["W"] = abs(image[i-1, j-2] - image[i-1, j]) + abs(image[i, j-3] - image[i, j-1]) + abs(image[i+1, j-2] - image[i+1, j]) + abs(image[i-1, j-3] - image[i-1, j-1]) + abs(image[i+1, j-3] - image[i+1, j-1]) + abs(image[i, j-2] - image[i, j]) + EPSILON
                diag_gradient_factor["E"] = abs(image[i-1, j+2] - image[i-1, j]) + abs(image[i, j+3] - image[i, j+1]) + abs(image[i+1, j+2] - image[i+1, j]) + abs(image[i-1, j+3] - image[i-1, j+1]) + abs(image[i+1, j+3] - image[i+1, j+1]) + abs(image[i, j+2] - image[i, j]) + EPSILON
                pre_estimation["NW"] = 0
                pre_estimation["SW"] = 0
                pre_estimation["NE"] = 0
                pre_estimation["SE"] = 0
                for k in range(8):
                    pre_estimation["NW"] += image[i + coord_NW[k][0], j + coord_NW[k][1]] * H[k]
                    pre_estimation["SW"] += image[i + coord_SW[k][0], j + coord_SW[k][1]] * H[k]
                    pre_estimation["NE"] += image[i + coord_NE[k][0], j + coord_NE[k][1]] * H[k]
                    pre_estimation["SE"] += image[i + coord_SE[k][0], j + coord_SE[k][1]] * H[k]
                diag_gradient_factor["NW"] = abs(image[i-2, j-1] - image[i-1, j]) + abs(image[i-1, j] - image[i, j+1]) + abs(image[i-1, j-2] - image[i, j-1]) + abs(image[i, j-1] - image[i+1, j]) + abs(image[i-1, j-1] - image[i+1, j+1]) + abs(image[i-2, j-2] - image[i, j]) + EPSILON
                diag_gradient_factor["SW"] = abs(image[i-1, j] - image[i, j-1]) + abs(image[i, j+1] - image[i+1, j]) + abs(image[i-1, j+1] - image[i+1, j-1]) + abs(image[i, j-1] - image[i+1, j-2]) + abs(image[i+1, j] - image[i+2, j-1]) + abs(image[i, j] - image[i+2, j-2]) + EPSILON
                diag_gradient_factor["NE"] = abs(image[i-1, j] - image[i, j-1]) + abs(image[i, j+1] - image[i+1, j]) + abs(image[i-1, j+1] - image[i+1, j-1]) + abs(image[i-1, j] - image[i-2, j+1]) + abs(image[i, j+1] - image[i-1, j+2]) + abs(image[i, j] - image[i-2, j+2]) + EPSILON
                diag_gradient_factor["SE"] = abs(image[i+2, j+1] - image[i+1, j]) + abs(image[i-1, j] - image[i, j+1]) + abs(image[i+1, j+2] - image[i, j+1]) + abs(image[i, j-1] - image[i+1, j]) + abs(image[i-1, j-1] - image[i+1, j+1]) + abs(image[i+2, j+2] - image[i, j]) + EPSILON
                for k, v in diag_gradient_factor.items():
                    weight[k] = 1 / v
                num = 0
                den = 0
                for k, v in pre_estimation.items():
                    num += v * weight[k]
                    den += weight[k]
                estimated_green[i, j] = num / den
            else:
                estimated_green[i, j] = image[i, j]

    # Then we work on the blue and red planes
    diag = [(-1, -1), (-1, 1), (1, -1), (1, 1)]
    for i in range(3, image.shape[0] - 4):
        for j in range(3, image.shape[1] - 4):
            if is_red(i, j) or is_blue(i, j):
                ksi_nw = image[i-1, j-1] - estimated_green[i-1, j-1]
                ksi_ne = image[i-1, j+1] - estimated_green[i-1, j+1]
                ksi_sw = image[i+1, j-1] - estimated_green[i+1, j-1]
                ksi_se = image[i+1, j+1] - estimated_green[i+1, j+1]
                g_nw = abs(estimated_green[i-2, j-1] - estimated_green[i-1, j]) + abs(estimated_green[i-1, j-2] - estimated_green[i, j-1]) + abs(estimated_green[i-2, j-2] - estimated_green[i-1, j-1]) + abs(estimated_green[i-1, j-1] - estimated_green[i, j]) + abs(image[i-1, j-1] - image[i+1, j+1]) + EPSILON
                g_ne = abs(estimated_green[i-2, j+1] - estimated_green[i-1, j]) + abs(estimated_green[i-1, j+2] - estimated_green[i, j+1]) + abs(estimated_green[i-2, j+2] - estimated_green[i-1, j+1]) + abs(estimated_green[i-1, j+1] - estimated_green[i, j]) + abs(image[i-1, j+1] - image[i+1, j-1]) + EPSILON
                g_sw = abs(estimated_green[i+2, j-1] - estimated_green[i+1, j]) + abs(estimated_green[i+1, j-2] - estimated_green[i, j-1]) + abs(estimated_green[i+2, j-2] - estimated_green[i+1, j-1]) + abs(estimated_green[i+1, j-1] - estimated_green[i, j]) + abs(image[i+1, j-1] - image[i-1, j+1]) + EPSILON
                g_se = abs(estimated_green[i+2, j+1] - estimated_green[i+1, j]) + abs(estimated_green[i+1, j+2] - estimated_green[i, j+1]) + abs(estimated_green[i+2, j+2] - estimated_green[i+1, j+1]) + abs(estimated_green[i+1, j+1] - estimated_green[i, j]) + abs(image[i+1, j+1] - image[i-1, j-1]) + EPSILON
                pre_est = estimated_green[i, j] + (ksi_nw/g_nw + ksi_ne/g_ne + ksi_sw/g_sw + ksi_se/g_se) / (1/g_nw + 1/g_ne + 1/g_sw + 1/g_se)
                sum_dif = 0
                sum_sig = 0
                for coord in diag:
                    mu = image[i + coord[0], j + coord[1]] - pre_est
                    sum_dif += mu / (1 + abs(mu))
                    sum_sig += 1 / (1 + abs(mu))
                if is_red(i, j):
                    estimated_blue[i, j] = pre_est
                    estimated_red[i, j] = image[i, j]
                else:
                    estimated_red[i, j] = pre_est
                    estimated_blue[i, j] = image[i, j]
            else:
                if is_red(i-1, j):
                    estimated_red[i, j] = (image[i-1, j] + image[i+1, j]) / 2
                    estimated_blue[i, j] = (image[i, j-1] + image[i, j+1]) / 2
                else:
                    estimated_red[i, j] = (image[i, j-1] + image[i, j+1]) / 2
                    estimated_blue[i, j] = (image[i-1, j] + image[i+1, j]) / 2

    res[:, :, 0] = estimated_red
    res[:, :, 1] = estimated_green
    res[:, :, 2] = estimated_blue
    return res
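
# Minimal usage sketch: a random array stands in for a Bayer mosaicked image;
# it only demonstrates the call signature of MDWI, not its visual quality.
if __name__ == "__main__":
    demo_mosaic = np.random.default_rng(0).random((32, 32))
    demo_rgb = MDWI(demo_mosaic)
    print(demo_rgb.shape)  # expected: (32, 32, 3)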
import numpy as np
from scipy.signal import convolve2d

def down_sample(image):
    """Averages each 2x2 block of a quad Bayer mosaic into a single Bayer sample."""
    down_sampled = np.empty((int(image.shape[0] / 2), int(image.shape[1] / 2)))
    down_sampled[:, :] = (image[::2, ::2] + image[1::2, ::2] + image[::2, 1::2] + image[1::2, 1::2]) / 4
    return down_sampled

def up_sample(image):
    """Doubles the spatial resolution by replicating each pixel into a 2x2 block."""
    up_sampled = np.empty((int(image.shape[0] * 2), int(image.shape[1] * 2), image.shape[2]))
    up_sampled[::2, ::2, :] = image
    up_sampled[1::2, ::2, :] = image
    up_sampled[::2, 1::2, :] = image
    up_sampled[1::2, 1::2, :] = image
    return up_sampled

def refine(image):
    """Smooths each channel with a small low-pass kernel to soften the 2x2 blocking."""
    res = np.empty((image.shape[0], image.shape[1], 3))
    ker = np.array([[1, 2, 1], [2, 4, 2], [1, 2, 1]]) / 16
    for i in range(3):
        res[:, :, i] = convolve2d(image[:, :, i], ker, mode='same')
    return res
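
# Minimal round-trip sketch of the quad Bayer handling used by run_reconstruction:
# the mosaic is reduced to Bayer scale, then a (here random, stand-in) demosaicked
# result is brought back to full size and smoothed. Array sizes are illustrative.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    quad_mosaic = rng.random((64, 64))
    bayer_scale = down_sample(quad_mosaic)        # (32, 32) Bayer-scale mosaic
    rgb_small = rng.random((32, 32, 3))           # placeholder for a demosaicked image
    rgb_full = refine(up_sample(rgb_small))       # (64, 64, 3) refined output
    print(bayer_scale.shape, rgb_full.shape)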
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.marty.mdwi import MDWI
from src.methods.marty.high_quality_interpolation import high_quality_interpolation
from src.methods.marty.quad_bayer import down_sample, up_sample, refine
# Either MDWI or high_quality_interpolation
method = high_quality_interpolation
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    # Performing the reconstruction.
    if cfa == "bayer":
        res = method(y)
    else:
        down_sample_image = down_sample(y)
        bayer_image = method(down_sample_image)
        res = up_sample(bayer_image)
        res = refine(res)
    return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
@@ -5,7 +5,7 @@ Students can call their functions (declared in others files of src/methods/your_
 import numpy as np
-from src.methods.template.mourasa_reconstruct import *
+from src.methods.mouras_aubin.mourasa_reconstruct import *
 from src.forward_model import CFA
"""A file containing the main function for the chosen interpolation (Adams-Hamilton Algorithm).
"""
### BE CAREFUL TO ADD THE MODULE TORCH TO THE FILE requirements.txt
import numpy as np
from scipy.signal import convolve2d
import torch
import torch.nn.functional as F
from src.forward_model import CFA
# Initial kernels for the bilinear interpolation
ker_bayer_red_blue = np.array([[1, 2, 1],
                               [2, 4, 2],
                               [1, 2, 1]]) / 4

ker_bayer_green = np.array([[0, 1, 0],
                            [1, 4, 1],
                            [0, 1, 0]]) / 4

# Kernels for the quad_bayer filter
ker_quad_red_blue = np.array([[1, 1, 2, 2, 0, 1],
                              [1, 1, 2, 2, 1, 1],
                              [2, 2, 4, 4, 2, 2],
                              [2, 2, 4, 4, 2, 2],
                              [1, 1, 2, 2, 1, 1],
                              [1, 1, 2, 2, 1, 1]]) / 16

ker_quad_green = np.array([[0, 0, 1, 1, 0, 0],
                           [0, 0, 1, 1, 0, 0],
                           [1, 1, 4, 4, 1, 1],
                           [1, 1, 4, 4, 1, 1],
                           [0, 0, 1, 1, 0, 0],
                           [0, 0, 1, 1, 0, 0]]) / 16
def second_naive_interpolation(op: CFA, y: np.ndarray) -> np.ndarray:
    """Performs a naive interpolation of the lost pixels.

    For the quad_bayer CFA, performs a convolution with kernels (above:
    ker_quad_red_blue and ker_quad_green) inspired by the classic bilinear
    interpolation.

    Args:
        op (CFA): CFA operator.
        y (np.ndarray): Mosaicked image.

    Returns:
        np.ndarray: Demosaicked image.
    """
    z = op.adjoint(y)

    if op.cfa == 'bayer':
        res = np.empty(op.input_shape)
        res[:, :, 0] = convolve2d(z[:, :, 0], ker_bayer_red_blue, mode='same')
        res[:, :, 1] = convolve2d(z[:, :, 1], ker_bayer_green, mode='same')
        res[:, :, 2] = convolve2d(z[:, :, 2], ker_bayer_red_blue, mode='same')
    else:
        res = np.empty(op.input_shape)
        res[:, :, 0] = convolve2d(z[:, :, 0], ker_quad_red_blue, mode='same')
        res[:, :, 1] = convolve2d(z[:, :, 1], ker_quad_green, mode='same')
        res[:, :, 2] = convolve2d(z[:, :, 2], ker_quad_red_blue, mode='same')
        # res[:, :, 0] = convolution_pad_stride(z[:, :, 0], ker_quad_red_blue)
        # res[:, :, 1] = convolution_pad_stride(z[:, :, 1], ker_quad_green)
        # res[:, :, 2] = convolution_pad_stride(z[:, :, 2], ker_quad_red_blue)

    return res
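
# Example call (sketch): with the project's CFA operator, this naive interpolation
# is typically invoked as
#     op = CFA("quad_bayer", (h, w, 3))
#     rgb = second_naive_interpolation(op, y)
# where y is the (h, w) mosaicked measurement.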
import torch
import torch.nn.functional as F
def convolution_pad_stride(input, kernel):
    """An attempt at convolution with stride and padding.

    Args:
        input (np.ndarray): input (mosaicked) image.
        kernel (np.ndarray): convolution kernel.

    Returns:
        np.ndarray: Convolution of the image with the kernel, with a stride of 2 and a padding of 514.
    """
    input_tensor = torch.tensor(np.expand_dims(input, axis=(0, 1)))
    kernel_tensor = torch.tensor(np.expand_dims(kernel, axis=(0, 1)))
    output = F.conv2d(input_tensor, kernel_tensor, stride=2, padding=514)
    output = output.numpy().squeeze()
    return output
import numpy as np
def take(array2d, i, j):
    """
    Helper that returns the value of a 2D array at position (i, j),
    or 0 when the index is out of bounds.

    Args:
        array2d (np.ndarray): input array
        i: row index
        j: column index

    Returns:
        np.float64: array2d[i, j], or np.float64(0) if (i, j) is out of bounds
    """
    if 0 <= i < array2d.shape[0] and 0 <= j < array2d.shape[1]:
        return array2d[i, j]
    return np.float64(0)
def red_blue_positions(img):
    """
    Helper generator that yields the red and blue positions in the mosaicked image.

    Args:
        img (np.ndarray): mosaicked image

    Yields:
        (i, j): coordinates of the non-green (red or blue) pixels
    """
    first_non_green = 1
    for i in range(img.shape[0]):
        for j in range(first_non_green, img.shape[1], 2):
            yield i, j
        first_non_green = 1 - first_non_green
def directional_green_interpolation(img):
    """
    Function that performs green interpolation in horizontal and vertical directions.

    Args:
        img (np.ndarray): input (mosaicked) image

    Returns:
        green_h, green_v (np.ndarray): horizontally and vertically interpolated green components
    """
    green_h = img.copy()  # green positions are copied
    green_v = img.copy()  # other values will be replaced
    for i, j in red_blue_positions(img):
        r = lambda k: take(img, i, j + k)  # r - relative indexing
        green_h[i, j] = (r(1) + r(-1) + r(0)) / 2 - (r(2) + r(-2)) / 4
        r = lambda k: take(img, i + k, j)
        green_v[i, j] = (r(1) + r(-1) + r(0)) / 2 - (r(2) + r(-2)) / 4
    return green_h, green_v
def green_decision(img, green_h, green_v, cardinal_directions_improvement=True):
    """
    Function that chooses, for each red/blue position, between the horizontal and
    the vertical green estimates, based on the uniformity of the color differences
    (chrominance gradients).

    Args:
        img (np.ndarray): input (mosaicked) image
        green_h (np.ndarray): horizontally interpolated green component
        green_v (np.ndarray): vertically interpolated green component
        cardinal_directions_improvement (bool): if True (default), gives a larger
            weight to the central row and column of the window when accumulating
            the gradients

    Returns:
        green (np.ndarray): interpolated green image
        delta_h (np.ndarray): horizontal gradient image
        delta_v (np.ndarray): vertical gradient image
    """
    height, width = img.shape
    # "chrominance" is R - G in red locations, B - G in blue locations
    # and 0 in green locations
    chrominance_h = img - green_h
    chrominance_v = img - green_v
    # also 0 in green locations, this will be useful
    gradient_h = chrominance_h.copy()
    gradient_v = chrominance_v.copy()
    for i, j in red_blue_positions(img):
        gradient_h[i, j] -= take(chrominance_h, i, j + 2)
        gradient_v[i, j] -= take(chrominance_v, i + 2, j)
    gradient_h = np.abs(gradient_h)
    gradient_v = np.abs(gradient_v)

    # could be easily rewritten without loops
    window = np.ones(shape=(5, 5), dtype=np.float64)
    if cardinal_directions_improvement:
        window[2, :] = 3
        window[:, 2] = 3

    delta_h = np.zeros(shape=img.shape, dtype=np.float64)
    delta_v = delta_h.copy()
    padded_grad_h = np.zeros(shape=(img.shape[0] + 4, img.shape[1] + 4), dtype=np.float64)
    padded_grad_v = padded_grad_h.copy()
    padded_grad_h[2 : img.shape[0] + 2, 2 : img.shape[1] + 2] = gradient_h
    padded_grad_v[2 : img.shape[0] + 2, 2 : img.shape[1] + 2] = gradient_v

    green = green_h.copy()
    for i, j in red_blue_positions(img):
        delta_h[i, j] = np.sum(window * padded_grad_h[i : i + 5, j : j + 5])
        delta_v[i, j] = np.sum(window * padded_grad_v[i : i + 5, j : j + 5])
        if delta_v[i, j] < delta_h[i, j]:
            green[i, j] = green_v[i, j]
    return green, delta_h, delta_v
def red_blue_interpolation(img, green, delta_h, delta_v):
    """
    Function that performs the red and blue component interpolation.

    Args:
        img (np.ndarray): input (mosaicked) image
        green (np.ndarray): interpolated green image
        delta_h (np.ndarray): horizontal gradient image
        delta_v (np.ndarray): vertical gradient image

    Returns:
        red, blue (np.ndarray): interpolated red and blue image components
    """
    height, width = img.shape
    red = img.copy()
    blue = img.copy()

    # green positions first
    for i in range(0, height, 2):  # green-red rows
        for j in range(0, width, 2):
            red[i, j] = (take(img, i, j - 1) +
                         take(img, i, j + 1)) / 2
            blue[i, j] = (take(img, i - 1, j) +
                          take(img, i + 1, j)) / 2
    for i in range(1, height, 2):  # green-blue rows
        for j in range(1, width, 2):
            blue[i, j] = (take(img, i, j - 1) +
                          take(img, i, j + 1)) / 2
            red[i, j] = (take(img, i - 1, j) +
                         take(img, i + 1, j)) / 2

    # now red in blue positions, blue in red positions
    red_minus_blue = red - blue
    for i in range(1, height, 2):
        for j in range(0, width, 2):
            if delta_v[i, j] < delta_h[i, j]:
                red[i, j] = blue[i, j] + (take(red_minus_blue, i - 1, j) +
                                          take(red_minus_blue, i + 1, j)) / 2
            else:
                red[i, j] = blue[i, j] + (take(red_minus_blue, i, j - 1) +
                                          take(red_minus_blue, i, j + 1)) / 2
    for i in range(0, height, 2):
        for j in range(1, width, 2):
            if delta_v[i, j] < delta_h[i, j]:
                blue[i, j] = red[i, j] - (take(red_minus_blue, i - 1, j) +
                                          take(red_minus_blue, i + 1, j)) / 2
            else:
                blue[i, j] = red[i, j] - (take(red_minus_blue, i, j - 1) +
                                          take(red_minus_blue, i, j + 1)) / 2
    return red, blue
def demosaicking_algorithm(img):
    """
    Main function of the Daniele Menon demosaicking algorithm.

    Args:
        img (np.ndarray): input (mosaicked) image

    Returns:
        np.ndarray: reconstructed image
    """
    green_h, green_v = directional_green_interpolation(img)
    green, delta_h, delta_v = green_decision(img, green_h, green_v)
    red, blue = red_blue_interpolation(img, green, delta_h, delta_v)
    return np.clip(np.dstack((red, green, blue)), 0, 1)
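
# Minimal end-to-end sketch: a random 2D array stands in for a Bayer mosaicked
# image and is passed through the Menon-style pipeline defined above.
if __name__ == "__main__":
    demo_mosaic = np.random.default_rng(0).random((32, 32))
    demo_rgb = demosaicking_algorithm(demo_mosaic)
    print(demo_rgb.shape)  # expected: (32, 32, 3)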
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.nada_kouddane.functions import second_naive_interpolation, demosaicking_algorithm
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    res = demosaicking_algorithm(y)
    return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.quelletl.some_function import interpolation
import cv2
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
from scipy.signal import convolve2d
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    res = interpolation(y, op)
    return res
def quad_to_bayer(y):
    for i in range(1, y.shape[1], 4):   # swap columns i and i+1
        save = np.copy(y[:, i])
        y[:, i] = y[:, i+1]
        y[:, i+1] = save
    for j in range(1, y.shape[0], 4):   # swap rows j and j+1
        save = np.copy(y[j, :])
        y[j, :] = y[j+1, :]
        y[j+1, :] = save
    for i in range(1, y.shape[0], 4):   # swap diagonal pixels (i, j) and (i+1, j+1)
        for j in range(1, y.shape[1], 4):
            save = np.copy(y[i, j])
            y[i, j] = y[i+1, j+1]
            y[i+1, j+1] = save
    return y
ker_bayer_green = np.array([[0, 1, 0], [1, 4, 1], [0, 1, 0]]) / 4
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
import numpy as np
def bayer_blue_red(res, y, i, j):
    """
    Compute the estimated blue (resp. red) value at a red (resp. blue) Bayer pixel.

    Args:
        res: estimated image
        y: image to reconstruct
        i, j: indices

    Returns:
        value: value of the estimated pixel
    """
    K2 = res[i-1, j-1, 1] - y[i-1, j-1]
    K4 = res[i-1, j+1, 1] - y[i-1, j+1]
    K10 = res[i+1, j-1, 1] - y[i+1, j-1]
    K12 = res[i+1, j+1, 1] - y[i+1, j+1]
    value = res[i, j, 1] - 1/4 * (K2 + K4 + K10 + K12)
    return value


def bayer_green_vert(res, y, i, j):
    """
    Compute the estimated blue/red value at a green Bayer pixel in the vertical direction.

    Args:
        res: estimated image
        y: image to reconstruct
        i, j: indices

    Returns:
        value: value of the estimated pixel
    """
    k1 = res[i-1, j, 1] - y[i-1, j]
    k2 = res[i+1, j, 1] - y[i+1, j]
    value = y[i, j] - 1/2 * (k1 + k2)
    return value


def bayer_green_hor(res, y, i, j):
    """
    Compute the estimated blue/red value at a green Bayer pixel in the horizontal direction.

    Args:
        res: estimated image
        y: image to reconstruct
        i, j: indices

    Returns:
        value: value of the estimated pixel
    """
    k1 = res[i, j-1, 1] - y[i, j-1]
    k2 = res[i, j+1, 1] - y[i, j+1]
    value = y[i, j] - 1/2 * (k1 + k2)
    return value
def interpolate_green(res, y, z):
    """
    Directional interpolation of the green channel.

    Args:
        res: estimated image
        y: image to reconstruct
        z: Bayer pattern (adjoint of the mosaicked image)

    Returns:
        res: image with the green channel reconstructed
    """
    for i in range(2, y.shape[0]-1):
        for j in range(2, y.shape[1]-1):
            # Vertical and horizontal gradients
            if z[i, j, 1] == 0:
                d_h = np.abs(y[i, j-1] - y[i, j+1])
                d_v = np.abs(y[i-1, j] - y[i+1, j])
                if d_h > d_v:
                    green = (y[i-1, j] + y[i+1, j]) / 2
                elif d_v > d_h:
                    green = (y[i, j-1] + y[i, j+1]) / 2
                else:
                    green = (y[i, j-1] + y[i, j+1] + y[i-1, j] + y[i+1, j]) / 4
            else:
                green = y[i, j]
            res[i, j, 1] = green
    return res
def quad_to_bayer(y):
    """
    Convert a quad Bayer mosaic to a Bayer mosaic by swapping columns, rows and
    diagonal pixels of each 4x4 block.

    Args:
        y: quad Bayer image (or mask) to rearrange

    Returns:
        y: the input rearranged into a Bayer pattern
    """
    for i in range(1, y.shape[1], 4):   # swap columns i and i+1
        save = np.copy(y[:, i])
        y[:, i] = y[:, i+1]
        y[:, i+1] = save
    for j in range(1, y.shape[0], 4):   # swap rows j and j+1
        save = np.copy(y[j, :])
        y[j, :] = y[j+1, :]
        y[j+1, :] = save
    for i in range(1, y.shape[0], 4):   # swap diagonal pixels (i, j) and (i+1, j+1)
        for j in range(1, y.shape[1], 4):
            save = np.copy(y[i, j])
            y[i, j] = y[i+1, j+1]
            y[i+1, j+1] = save
    return y
def interpolation(y, op):
    """
    Reconstruct the image.

    Args:
        y: image to reconstruct
        op: CFA operator

    Returns:
        np.ndarray: Demosaicked image.
    """
    if op.cfa == 'quad_bayer':
        y = quad_to_bayer(y)
        op.mask = quad_to_bayer(op.mask)
    z = op.adjoint(y)
    res = np.empty(op.input_shape)

    # Interpolation of the green channel
    res = interpolate_green(res, y, z)

    # Interpolation of the R and B channels using inter-channel correlation
    for i in range(2, y.shape[0]-2):
        for j in range(2, y.shape[1]-2):
            # Bayer pixel is green
            if z[i, j, 1] != 0:
                # Green is between two vertical blue pixels
                if z[i+1, j, 0] == 0:
                    red = bayer_green_hor(res, y, i, j)    # Compute the red channel
                    blue = bayer_green_vert(res, y, i, j)  # Compute the blue channel
                # Green is between two vertical red pixels
                else:
                    blue = bayer_green_hor(res, y, i, j)   # Compute the blue channel
                    red = bayer_green_vert(res, y, i, j)   # Compute the red channel
            # Bayer pixel is red
            elif z[i, j, 0] != 0:
                red = y[i, j]                              # Red channel
                blue = bayer_blue_red(res, y, i, j)        # Blue channel
            # Bayer pixel is blue
            elif z[i, j, 2] != 0:
                blue = y[i, j]                             # Blue channel
                red = bayer_blue_red(res, y, i, j)         # Red channel
            res[i, j, 0] = np.clip(red, 0, 255)
            res[i, j, 2] = np.clip(blue, 0, 255)
    return res
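
# Minimal usage sketch: the CFA operator from the project's forward model is wired
# to interpolation(), with a random 2D array standing in for a real Bayer measurement.
if __name__ == "__main__":
    from src.forward_model import CFA
    demo_y = np.random.default_rng(0).random((64, 64))
    demo_op = CFA("bayer", (64, 64, 3))
    demo_rgb = interpolation(demo_y, demo_op)
    print(demo_rgb.shape)  # expected: (64, 64, 3)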
import numpy as np
from src.forward_model import CFA
import cv2 as cv2
def hamilton_adams(y, input_shape):
    """Hamilton-Adams demosaicking: gradient-directed green interpolation followed by bilinear red/blue interpolation."""
    n, p = input_shape[0], input_shape[1]
    z = np.copy(y)
    for i in range(2, n-2):  # green interpolation by gradient comparison for every red and blue pixel
        for j in range(2, p-2):
            if z[i,j,1] == 0 and z[i,j,0] != 0:  # red pixel
                # Vertical and horizontal gradients
                grad_y = np.abs(z[i-1,j,1] - z[i+1,j,1]) + np.abs(2*z[i,j,0] - z[i-2,j,0] - z[i+2,j,0])
                grad_x = np.abs(z[i,j-1,1] - z[i,j+1,1]) + np.abs(2*z[i,j,0] - z[i,j-2,0] - z[i,j+2,0])
                if grad_x < grad_y:
                    z[i,j,1] = (z[i,j-1,1] + z[i,j+1,1])/2 + (2*z[i,j,0] - z[i,j-2,0] - z[i,j+2,0])/4
                elif grad_x > grad_y:
                    z[i,j,1] = (z[i-1,j,1] + z[i+1,j,1])/2 + (2*z[i,j,0] - z[i-2,j,0] - z[i+2,j,0])/4
                else:
                    z[i,j,1] = (z[i-1,j,1] + z[i+1,j,1] + z[i,j-1,1] + z[i,j+1,1])/4 + (2*z[i,j,0] - z[i,j-2,0] - z[i,j+2,0] + 2*z[i,j,0] - z[i-2,j,0] - z[i+2,j,0])/8
            elif z[i,j,1] == 0 and z[i,j,2] != 0:  # blue pixel
                # Vertical and horizontal gradients
                grad_y = np.abs(z[i-1,j,1] - z[i+1,j,1]) + np.abs(2*z[i,j,2] - z[i-2,j,2] - z[i+2,j,2])
                grad_x = np.abs(z[i,j-1,1] - z[i,j+1,1]) + np.abs(2*z[i,j,2] - z[i,j-2,2] - z[i,j+2,2])
                if grad_x < grad_y:
                    z[i,j,1] = (z[i,j-1,1] + z[i,j+1,1])/2 + (2*z[i,j,2] - z[i,j-2,2] - z[i,j+2,2])/4
                elif grad_x > grad_y:
                    z[i,j,1] = (z[i-1,j,1] + z[i+1,j,1])/2 + (2*z[i,j,2] - z[i-2,j,2] - z[i+2,j,2])/4
                else:
                    z[i,j,1] = (z[i-1,j,1] + z[i+1,j,1] + z[i,j-1,1] + z[i,j+1,1])/4 + (2*z[i,j,2] - z[i,j-2,2] - z[i,j+2,2] + 2*z[i,j,2] - z[i-2,j,2] - z[i+2,j,2])/8
    for i in range(1, n-1):  # red/blue interpolation by bilinear interpolation on blue/red pixels
        for j in range(1, p-1):
            if z[i,j,2] != 0:
                z[i,j,0] = (z[i-1,j-1,0] + z[i-1,j+1,0] + z[i+1,j-1,0] + z[i+1,j+1,0]) / 4
            elif z[i,j,0] != 0:
                z[i,j,2] = (z[i-1,j-1,2] + z[i-1,j+1,2] + z[i+1,j-1,2] + z[i+1,j+1,2]) / 4
            else:
                z[i,j] = z[i,j]
    for i in range(1, n-1):  # blue and red interpolation by bilinear interpolation on green pixels
        for j in range(1, p-1):
            if z[i,j,0] == z[i,j,2]:
                z[i,j,0] = (z[i-1,j,0] + z[i,j-1,0] + z[i+1,j,0] + z[i,j+1,0]) / 4
                z[i,j,2] = (z[i-1,j,2] + z[i,j-1,2] + z[i+1,j,2] + z[i,j+1,2]) / 4
    return z
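
# Minimal usage sketch: as in reconstruct.py, the CFA adjoint lifts a (random,
# stand-in) measurement to a 3-channel mosaic before calling hamilton_adams.
if __name__ == "__main__":
    demo_shape = (64, 64, 3)
    demo_op = CFA("bayer", demo_shape)
    demo_cfa_img = demo_op.adjoint(np.random.default_rng(0).random((64, 64)))
    demo_rgb = hamilton_adams(demo_cfa_img, demo_shape)
    print(demo_rgb.shape)  # expected: (64, 64, 3)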
# def SSD(y, cfa_img, input_shape): #SSD algo
# n,p = input_shape[0], input_shape[1]
# hlist = [16,4,1]
# res = np.copy(y)
# for h in hlist:
# res = NLh(res, cfa_img ,n,p,h)
# res = CR(res,n,p)
# return res
# def NLh(y, cfa_img, n, p, h): #NLh part
# res = np.copy(cfa_img)
# for i in range(4,n-3):
# for j in range(4,p-3):
# if cfa_img[i,j,0] != 0:
# res[i,j,1] = NLh_calc(y, cfa_img, i, j, 1, h)
# res[i,j,2] = NLh_calc(y, cfa_img, i, j, 2, h)
# print((i,j))
# elif cfa_img[i,j,1] != 0:
# res[i,j,0] = NLh_calc(y, cfa_img, i, j, 0, h)
# res[i,j,2] = NLh_calc(y, cfa_img, i, j, 2, h)
# print((i,j))
# else:
# res[i,j,0] = NLh_calc(y, cfa_img, i, j, 0, h)
# res[i,j,1] = NLh_calc(y, cfa_img, i, j, 1, h)
# print((i,j))
# return res
# def NLh_calc(y, cfa_img, i, j, channel, h): #aux function to calculate the main sum for each pixel
# sum = 0
# norm = 0
# for k in range(-2,3):
# for l in range(-2,3):
# if k!=0 and j!=0:
# a = poids(y, channel, i,j,k,l,h)
# sum += a * cfa_img[i+k,j+l,channel]
# norm += a
# return sum / norm
# def poids(y, channel, i,j,k,l,h): #aux function to calcultate the weight for a given p=(i,j) , q=(k,l)
# som = 0
# # for tx in range(-1,2):
# # for ty in range(-1,2):
# som += (np.abs(y[i-1 , j-1, channel] - y[k+1 , l-1, channel]))**2
# som += (np.abs(y[i-1 , j, channel] - y[k+1 , l, channel]))**2
# som += (np.abs(y[i-1 , j+1, channel] - y[k+1 , l+1, channel]))**2
# som += (np.abs(y[i, j-1, channel] - y[k, l-1, channel]))**2
# som += (np.abs(y[i, j, channel] - y[k, l, channel]))**2
# som += (np.abs(y[i, j+1, channel] - y[k, l+1, channel]))**2
# som += (np.abs(y[i+1 , j-1, channel] - y[k+1 , l-1, channel]))**2
# som += (np.abs(y[i+1 , j, channel] - y[k+1 , l, channel]))**2
# som += (np.abs(y[i+1 , j+1, channel] - y[k+1 , l+1, channel]))**2
# res = np.exp((-1/h**2) * som)
# return res
# def CR(img_rgb): #Chrominance median
# res = cv2.cvtColor(img_rgb, cv2.COLOR_RGB2YUV)
# y, u, v = cv2.split(res)
# U_med = cv2.medianBlur(u, 3)
# V_med = cv2.medianBlur(v, 3)
# y = cv2.cvtColor(y, cv2.COLOR_GRAY2RGB)
# u = cv2.cvtColor(u, cv2.COLOR_GRAY2RGB)
# v = cv2.cvtColor(v, cv2.COLOR_GRAY2RGB)
# res = np.vstack([y, u, v])
# return res
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.ramanantsitonta_harizo.fonctions import hamilton_adams #, SSD
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    # Performing the reconstruction.
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    cfa_img = op.adjoint(y)
    res = hamilton_adams(cfa_img, input_shape)
    # res = SSD(res, cfa_img, input_shape)
    return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller