Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing 1122 additions and 0 deletions
import numpy as np
from .clip_values import clip_values
from .process_channel import process_channel
from .process_interpolation import process_interpolation
def process_red_channel(img: np.ndarray, N: int, M: int) -> np.ndarray:
"""Process the red channel of an image
Args:
img (np.ndarray): image to process
N (int): height of the image
M (int): width of the image
Returns:
np.ndarray: processed image
"""
img = process_interpolation(img, 0, N, M)
img = process_channel(img, 0, N, M)
img = clip_values(img)
return img
\ No newline at end of file
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.RHOUCH_Oussama.process_green_channel import process_green_channel
from src.methods.RHOUCH_Oussama.process_red_channel import process_red_channel
from src.methods.RHOUCH_Oussama.process_blue_channel import process_blue_channel
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
# Performing the reconstruction.
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
img_restored = op.adjoint(y)
N, M = img_restored.shape[:2]
img_restored = process_green_channel(img_restored, N, M)
img_restored = process_red_channel(img_restored, N, M)
img_restored = process_blue_channel(img_restored, N, M)
return img_restored
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
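As a usage sketch (assuming this reconstruction lives in src/methods/RHOUCH_Oussama/reconstruct.py; the helpers come from src.utils and src.forward_model, exactly as in the __main__ block further down in this diff):

from src.utils import load_image, psnr, ssim
from src.forward_model import CFA
from src.methods.RHOUCH_Oussama.reconstruct import run_reconstruction  # assumed module path

img = load_image('images/img_4.png')   # ground-truth RGB image
op = CFA('bayer', img.shape)           # forward model
y = op.direct(img)                     # mosaicked acquisition
res = run_reconstruction(y, 'bayer')   # demosaicked estimate
print('PSNR:', psnr(img, res), 'SSIM:', ssim(img, res))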
File added
"""A file containing a (pretty useless) reconstruction.
It serves as example of how the project works.
This file should NOT be modified.
"""
import numpy as np
from scipy.signal import convolve2d
from src.forward_model import CFA
def naive_interpolation(op: CFA, y: np.ndarray) -> np.ndarray:
"""Performs a simple interpolation of the lost pixels.
Args:
op (CFA): CFA operator.
y (np.ndarray): Mosaicked image.
Returns:
np.ndarray: Demosaicked image.
"""
z = op.adjoint(y)
if op.cfa == 'bayer':
res = np.empty(op.input_shape)
res[:, :, 0] = convolve2d(z[:, :, 0], ker_bayer_red_blue, mode='same')
res[:, :, 1] = convolve2d(z[:, :, 1], ker_bayer_green, mode='same')
res[:, :, 2] = convolve2d(z[:, :, 2], ker_bayer_red_blue, mode='same')
else:
res = np.empty(op.input_shape)
res[:, :, 0] = varying_kernel_convolution(z[:, :, 0], K_list_red)
res[:, :, 1] = varying_kernel_convolution(z[:, :, 1], K_list_green)
res[:, :, 2] = varying_kernel_convolution(z[:, :, 2], K_list_blue)
return res
def extract_padded(M, size, i, j):
N_i, N_j = M.shape
res = np.zeros((size, size))
middle_size = int((size - 1) / 2)
for ii in range(- middle_size, middle_size + 1):
for jj in range(- middle_size, middle_size + 1):
if i + ii >= 0 and i + ii < N_i and j + jj >= 0 and j + jj < N_j:
res[middle_size + ii, middle_size + jj] = M[i + ii, j + jj]
return res
def varying_kernel_convolution(M, K_list):
N_i, N_j = M.shape
res = np.zeros_like(M)
for i in range(N_i):
for j in range(N_j):
res[i, j] = np.sum(extract_padded(M, K_list[4 * (i % 4) + j % 4].shape[0], i, j) * K_list[4 * (i % 4) + j % 4])
np.clip(res, 0, 1, res)
return res
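The kernel index 4 * (i % 4) + j % 4 used above picks one of the 16 kernels according to the pixel's position inside a 4x4 quad-Bayer tile; a quick check of that indexing (not part of the original file):

# Each printed row lists the kernel indices used across one row of a 4x4 tile.
for i in range(4):
    print([4 * (i % 4) + j % 4 for j in range(4)])
# -> [0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]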
K_identity = np.zeros((5, 5))
K_identity[2, 2] = 1
K_red_0 = np.zeros((5, 5))
K_red_0[2, :] = np.array([-3, 13, 0, 0, 2]) / 12
K_red_1 = np.zeros((5, 5))
K_red_1[2, :] = np.array([2, 0, 0, 13, -3]) / 12
K_red_8 = np.zeros((5, 5))
K_red_8[:2, :2] = np.array([[-1, -1], [-1, 9]]) / 6
K_red_9 = np.zeros((5, 5))
K_red_9[:2, 3:] = np.array([[-1, -1], [9, -1]]) / 6
K_red_10 = np.zeros((5, 5))
K_red_10[:, 2] = np.array([-3, 13, 0, 0, 2]) / 12
K_red_12 = np.zeros((5, 5))
K_red_12[3:, :2] = np.array([[-1, 9], [-1, -1]]) / 6
K_red_13 = np.zeros((5, 5))
K_red_13[3:, 3:] = np.array([[9, -1], [-1, -1]]) / 6
K_red_14 = np.zeros((5, 5))
K_red_14[:, 2] = np.array([2, 0, 0, 13, -3]) / 12
K_list_red = [K_red_0, K_red_1, K_identity, K_identity, K_red_0, K_red_1, K_identity, K_identity, K_red_8, K_red_9, K_red_10, K_red_10, K_red_12, K_red_13, K_red_14, K_red_14]
K_green_2 = np.zeros((5, 5))
K_green_2[2, :] = [-3, 13, 0, 0, 2]
K_green_2[:, 2] = [-3, 13, 0, 0, 2]
K_green_2 = K_green_2 / 24
K_green_3 = np.zeros((5, 5))
K_green_3[2, :] = [2, 0, 0, 13, -3]
K_green_3[:, 2] = [-3, 13, 0, 0, 2]
K_green_3 = K_green_3 / 24
K_green_6 = np.zeros((5, 5))
K_green_6[2, :] = [-3, 13, 0, 0, 2]
K_green_6[:, 2] = [2, 0, 0, 13, -3]
K_green_6 = K_green_6 / 24
K_green_7 = np.zeros((5, 5))
K_green_7[2, :] = [2, 0, 0, 13, -3]
K_green_7[:, 2] = [2, 0, 0, 13, -3]
K_green_7 = K_green_7 / 24
K_list_green = [K_identity, K_identity, K_green_2, K_green_3, K_identity, K_identity, K_green_6, K_green_7, K_green_2, K_green_3, K_identity, K_identity, K_green_6, K_green_7, K_identity, K_identity]
K_list_blue = [K_red_10, K_red_10, K_red_8, K_red_9, K_red_14, K_red_14, K_red_12, K_red_13, K_identity, K_identity, K_red_0, K_red_1, K_identity, K_identity, K_red_0, K_red_1]
ker_bayer_red_blue = np.array([[1, 2, 1], [2, 4, 2], [1, 2, 1]]) / 4
ker_bayer_green = np.array([[0, 1, 0], [1, 4, 1], [0, 1, 0]]) / 4
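A small sanity check of these bilinear kernels (a sketch, not part of the demo file): on a Bayer mosaic the interpolation weights reaching any interior pixel sum to 1, so the naive interpolation is a plain average of the available samples.

# Toy Bayer layout; the exact corner assignment does not matter for this check.
mask = np.zeros((8, 8, 3))
mask[0::2, 0::2, 0] = 1   # red samples
mask[0::2, 1::2, 1] = 1   # green samples
mask[1::2, 0::2, 1] = 1
mask[1::2, 1::2, 2] = 1   # blue samples
w_r = convolve2d(mask[:, :, 0], ker_bayer_red_blue, mode='same')
w_g = convolve2d(mask[:, :, 1], ker_bayer_green, mode='same')
# Away from the borders, every pixel receives a total weight of 1.
assert np.allclose(w_r[1:-1, 1:-1], 1) and np.allclose(w_g[1:-1, 1:-1], 1)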
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
from scipy.signal import convolve2d
import numpy as np
def compute_gradients(R_directions, G_directions, B_directions, L):
"""
This function will compute the gradients of the reconstructed channels, in each direction and in the YUV space
Inputs :
- R_directions, G_directions, B_directions : the reconstructed channels
- L : the size of the neighborhood on which to compute the gradients
Outputs :
- gradients : the gradients of the reconstructed channels in each direction
"""
Y = 0.299 * R_directions + 0.587 * G_directions + 0.114 * B_directions
U = R_directions - Y
V = B_directions - Y
# U_roll_north = np.roll(U, -L, axis=0)
# U_roll_south = np.roll(U, L, axis=0)
# U_roll_east = np.roll(U, -L, axis=1)
# U_roll_west = np.roll(U, L, axis=1)
# V_roll_north = np.roll(V, -L, axis=0)
# V_roll_south = np.roll(V, L, axis=0)
# V_roll_east = np.roll(V, -L, axis=1)
# V_roll_west = np.roll(V, L, axis=1)
H, W = U.shape[0], U.shape[1]
gradients = np.zeros((H, W, 4))
for height in range(U.shape[0]):
for width in range(U.shape[1]):
somme_u = [0, 0, 0, 0]
somme_v = [0, 0, 0, 0]
for l in range(1, L+1):
somme_u[0] += (U[height, width, 0] -
U[(height-l) % H, width, 0])**2
somme_u[1] += (U[height, width, 1] -
U[(height+l) % H, width, 1])**2
somme_u[2] += (U[height, width, 2] -
U[height, (width-l) % W, 2])**2
somme_u[3] += (U[height, width, 3] -
U[height, (width+l) % W, 3])**2
somme_v[0] += (V[height, width, 0] -
V[(height-l) % H, width, 0])**2
somme_v[1] += (V[height, width, 1] -
V[(height+l) % H, width, 1])**2
somme_v[2] += (V[height, width, 2] -
V[height, (width-l) % W, 2])**2
somme_v[3] += (V[height, width, 3] -
V[height, (width+l) % W, 3])**2
gradients[height, width] = 1/L * \
(np.sqrt(somme_u) + np.sqrt(somme_v))
return gradients
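The commented-out np.roll calls above hint at a vectorized formulation; a sketch of the same gradient computation without the per-pixel loops (an alternative, not the submitted code) could look like this:

def compute_gradients_rolled(R_directions, G_directions, B_directions, L):
    # Same quantity as compute_gradients, with the four directions ordered as
    # in the loops above: (i-l), (i+l), (j-l), (j+l). np.roll wraps around,
    # matching the modular indexing of the loop version.
    Y = 0.299 * R_directions + 0.587 * G_directions + 0.114 * B_directions
    U = R_directions - Y
    V = B_directions - Y
    shifts = [(1, 0), (-1, 0), (0, 1), (0, -1)]
    sum_u = np.zeros(U.shape)
    sum_v = np.zeros(V.shape)
    for l in range(1, L + 1):
        for k, (di, dj) in enumerate(shifts):
            sum_u[:, :, k] += (U[:, :, k] - np.roll(U[:, :, k], (l * di, l * dj), axis=(0, 1))) ** 2
            sum_v[:, :, k] += (V[:, :, k] - np.roll(V[:, :, k], (l * di, l * dj), axis=(0, 1))) ** 2
    return (np.sqrt(sum_u) + np.sqrt(sum_v)) / L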
def local_int(CFA_image, beta, L, epsilon, cfa_mask):
"""
This function will compute the reconstructed image, using directional interpolation, and will then select the best direction for each pixel.
Inputs :
- CFA_image : the three channels of the image (CFA)
- beta : the parameter of intercorrelation between the channels
- L : the size of the neighborhood on which to test the direction
- epsilon : the threshold for the selection of the direction
Outputs :
- R, G, B : the reconstructed channels
"""
G_directions = np.zeros((CFA_image.shape[0], CFA_image.shape[1], 4))
R_directions = np.zeros((CFA_image.shape[0], CFA_image.shape[1], 4))
B_directions = np.zeros((CFA_image.shape[0], CFA_image.shape[1], 4))
RG_directions = np.zeros((CFA_image.shape[0], CFA_image.shape[1], 4))
BG_directions = np.zeros((CFA_image.shape[0], CFA_image.shape[1], 4))
# Directional interpolation of the green channel
G_directions[:, :, 0] = CFA_image[:, :, 1] + \
np.roll(CFA_image[:, :, 1], -1, axis=0) + \
beta/2 * (CFA_image[:, :, 0] - np.roll(CFA_image[:, :, 0], -2, axis=0))
G_directions[:, :, 1] = CFA_image[:, :, 1] + \
np.roll(CFA_image[:, :, 1], 1, axis=0) + \
beta/2 * (CFA_image[:, :, 0] - np.roll(CFA_image[:, :, 0], 2, axis=0))
G_directions[:, :, 2] = CFA_image[:, :, 1] + \
np.roll(CFA_image[:, :, 1], -1, axis=1) + \
beta/2 * (CFA_image[:, :, 0] - np.roll(CFA_image[:, :, 0], -2, axis=1))
G_directions[:, :, 3] = CFA_image[:, :, 1] + \
np.roll(CFA_image[:, :, 1], 1, axis=1) + \
beta/2 * (CFA_image[:, :, 0] - np.roll(CFA_image[:, :, 0], 2, axis=1))
# Bilinear interpolation of red and blue channels
R_directions[:, :, 0] += CFA_image[:, :, 0]
R_directions[:, :, 1] += CFA_image[:, :, 0]
R_directions[:, :, 2] += CFA_image[:, :, 0]
R_directions[:, :, 3] += CFA_image[:, :, 0]
B_directions[:, :, 0] += CFA_image[:, :, 2]
B_directions[:, :, 1] += CFA_image[:, :, 2]
B_directions[:, :, 2] += CFA_image[:, :, 2]
B_directions[:, :, 3] += CFA_image[:, :, 2]
RG_directions[:, :, 0] += RG_directions[:, :, 0] - \
beta*G_directions[:, :, 0]
RG_directions[:, :, 1] += RG_directions[:, :, 1] - \
beta*G_directions[:, :, 1]
RG_directions[:, :, 2] += RG_directions[:, :, 2] - \
beta*G_directions[:, :, 2]
RG_directions[:, :, 3] += RG_directions[:, :, 3] - \
beta*G_directions[:, :, 3]
BG_directions[:, :, 0] += BG_directions[:, :, 0] - \
beta*G_directions[:, :, 0]
BG_directions[:, :, 1] += BG_directions[:, :, 1] - \
beta*G_directions[:, :, 1]
BG_directions[:, :, 2] += BG_directions[:, :, 2] - \
beta*G_directions[:, :, 2]
BG_directions[:, :, 3] += BG_directions[:, :, 3] - \
beta*G_directions[:, :, 3]
# Bi-linear interpolation
kernel = 1 / 4 * np.array([
[1, 2, 1],
[2, 4, 2],
[1, 2, 1]
])
R_directions[:, :, 0] = convolve2d(
R_directions[:, :, 0], kernel, mode='same')
R_directions[:, :, 1] = convolve2d(
R_directions[:, :, 1], kernel, mode='same')
R_directions[:, :, 2] = convolve2d(
R_directions[:, :, 2], kernel, mode='same')
R_directions[:, :, 3] = convolve2d(
R_directions[:, :, 3], kernel, mode='same')
B_directions[:, :, 0] = convolve2d(
B_directions[:, :, 0], kernel, mode='same')
B_directions[:, :, 1] = convolve2d(
B_directions[:, :, 1], kernel, mode='same')
B_directions[:, :, 2] = convolve2d(
B_directions[:, :, 2], kernel, mode='same')
B_directions[:, :, 3] = convolve2d(
B_directions[:, :, 3], kernel, mode='same')
R_directions += beta * G_directions
B_directions += beta * G_directions
gradients = compute_gradients(R_directions, G_directions, B_directions, L)
inv_gradients = 1/(gradients+epsilon)
somme_gradients = np.sum(inv_gradients, axis=2)
inv_gradients[:, :, 0] = inv_gradients[:, :, 0] / somme_gradients
inv_gradients[:, :, 1] = inv_gradients[:, :, 1] / somme_gradients
inv_gradients[:, :, 2] = inv_gradients[:, :, 2] / somme_gradients
inv_gradients[:, :, 3] = inv_gradients[:, :, 3] / somme_gradients
R_1 = np.sum(R_directions * inv_gradients, axis=2)
G_1 = np.sum(G_directions * inv_gradients, axis=2)
B_1 = np.sum(B_directions * inv_gradients, axis=2)
return R_1, G_1, B_1
\ No newline at end of file
import numpy as np
def nonlocal_int(R_0, G_0, B_0, beta, h, k, rho, N, cfa_mask) :
"""
Nonlocal interpolation algorithm
Inputs :
- R_0, G_0, B_0 : The initial reconstructed channels
- beta : the parameter of intercorrelation between the channels
- h : the filtering parameter
- k : the half-size of the search window
- rho : the half-size of the patches compared around each pixel
- N : the number of iterations
Outputs :
- R, G, B : the reconstructed channels
"""
height, width = R_0.shape
u_0 = np.stack((R_0, G_0, B_0), axis=2)
d = np.ones((height, width, height, width)) * np.inf
w = np.zeros((height, width, N))
ksi = np.zeros((height, width))
# Compute weight distribution on initialization u0
R = np.zeros_like(R_0)
G = np.zeros_like(G_0)
B = np.zeros_like(B_0)
index_image = np.zeros((height, width,N, 2))
for i in range(height) :
    for j in range(width) :
        # p = (i, j); q = (m, n_col) runs over the search window around p
        for m in range(max(0, i-k), min(height, i+k+1)) :
            for n_col in range(max(0, j-k), min(width, j+k+1)) :
                d[i, j, m, n_col] = np.sum((u_0[i-rho:i+rho+1, j-rho:j+rho+1] - u_0[m-rho:m+rho+1, n_col-rho:n_col+rho+1])**2)
        d[i, j, i, j] = np.inf
        d[i, j, i, j] = np.min(d[i, j])
        # Keep the N most similar patches: their positions and their exponential weights
        order = np.argsort(d[i, j], axis=None)
        for n in range(N) :
            index_image[i, j, n] = np.unravel_index(order[n], (height, width))
            w[i, j, n] = np.exp(-d[i, j].flat[order[n]] / (h**2))
        ksi[i, j] = np.sum(w[i, j])
        for n in range(N) :
            w[i, j, n] = w[i, j, n] / ksi[i, j]
# Enhancement of green channel
for i in range(height) :
for j in range(width) :
if not cfa_mask[i,j,1] == 1 : # Not in green CFA
for n in range(N) :
green_qn = G_0[int(index_image[i,j,n,0]), int(index_image[i,j,n,1])] # G0(qn)
red_qn = R_0[int(index_image[i,j,n,0]), int(index_image[i,j,n,1])]
blue_qn = B_0[int(index_image[i,j,n,0]), int(index_image[i,j,n,1])]
G[i,j] += w[i,j,n] * (green_qn - beta * (red_qn + blue_qn)) + beta * (R_0[i,j] + B_0[i,j])
# Enhancement of red and blue channels
for i in range(height) :
for j in range(width) :
if not cfa_mask[i,j,0] == 1 : # Not in red CFA
for n in range(N) :
red_qn = R_0[int(index_image[i,j,n,0]), int(index_image[i,j,n,1])] # R0(qn)
G_qn = G_0[int(index_image[i,j,n,0]), int(index_image[i,j,n,1])]
R[i,j] += w[i,j,n] * (red_qn - beta * G_qn) + beta * G_0[i,j]
if not cfa_mask[i,j,2] == 1 : # Not in blue CFA
for n in range(N) :
blue_qn = B_0[int(index_image[i,j,n,0]), int(index_image[i,j,n,1])] # B0(qn)
G_qn = G_0[int(index_image[i,j,n,0]), int(index_image[i,j,n,1])]
B[i,j] += w[i,j,n] * (blue_qn - beta * G_qn) + beta * G_0[i,j]
return R, G, B
\ No newline at end of file
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
import matplotlib.pyplot as plt
from local_int import local_int
from forward_model import CFA
from demo_reconstruction import naive_interpolation
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
if cfa == "quad_bayer":
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
return naive_interpolation(op, y)
# Performing the reconstruction.
input_shape = (y.shape[0], y.shape[1], 3)
cfa_obj = CFA(cfa, input_shape)
cfa_mask = cfa_obj.mask
# local_int works on the three-channel adjoint of the acquisition.
z = cfa_obj.adjoint(y)
R_0, G_0, B_0 = local_int(z, 0.7, 12, 1e-8, cfa_mask)
return np.stack((R_0, G_0, B_0), axis=2)
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
### How to use?

There are two functions that can be called in reconstruct.py:
- f.cfa_reconstruction_1, which uses method 1;
- f.cfa_reconstruction, which uses method 2.

They take the same arguments: y, cfa and input_shape. A usage sketch is given below.
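For example (a sketch based on the signatures in function.py and on the alias used in reconstruct.py below; y is the mosaicked image and cfa is 'bayer' or 'quad_bayer'):

import src.methods.Samanos_Thomas.function as f

input_shape = (y.shape[0], y.shape[1], 3)
img_method_2 = f.cfa_reconstruction(y, cfa, input_shape)    # method 2
img_method_1 = f.cfa_reconstruction_1(y, cfa, input_shape)  # method 1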
File added
import numpy as np
import matplotlib.pyplot as plt
from src.checks import check_cfa, check_rgb
from src.forward_model import CFA
import src.methods.Samanos_Thomas.version1 as v1
import src.methods.Samanos_Thomas.version2 as v2
from scipy import ndimage
def cfa_reconstruction(y: np.ndarray, cfa: str, input_shape: tuple) -> np.ndarray:
    """Performs demosaicking on y using method 2.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.
        input_shape (tuple): Shape of the demosaicked output.

    Returns:
        np.ndarray: Demosaicked image.
    """
if cfa == "bayer":
op = v2.bayer_reconstruction(y,input_shape)
elif cfa == "quad_bayer":
op = v2.quad_bayer_reconstruction(y,input_shape)
else:
op = np.zeros(input_shape)
return op
def cfa_reconstruction_1(y: np.ndarray, cfa: str, input_shape: tuple) -> np.ndarray:
    """Performs demosaicking on y using method 1.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.
        input_shape (tuple): Shape of the demosaicked output.

    Returns:
        np.ndarray: Demosaicked image.
    """
if cfa == "bayer":
op = v1.bayer_reconstruction(y,input_shape)
elif cfa == "quad_bayer":
op = v1.quad_bayer_reconstruction(y,input_shape)
else:
op = np.zeros(input_shape)
return op
\ No newline at end of file
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
import src.methods.Samanos_Thomas.function as f
from src.forward_model import CFA
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
input_shape = (y.shape[0], y.shape[1], 3)
Y = f.cfa_reconstruction(y, cfa,input_shape)
return Y
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
import numpy as np
from skimage.transform import resize
def upscale(img: np.ndarray, shape: tuple, pas: int) -> np.ndarray:
    """Upscales the acquisition to the shape of the desired reconstruction.

    Args:
        img (np.ndarray): Mosaicked image at the reduced size.
        shape (tuple): Shape of the output.
        pas (int): Downsampling step used to build img (not used by the resize).

    Returns:
        np.ndarray: Upscaled image.
    """
return resize(img, shape, anti_aliasing=True)
def bayer_reconstruction(y : np.ndarray, input_shape : tuple) -> np.ndarray:
op_shape = (input_shape[0]//2,input_shape[1]//2,3)
op = np.zeros(op_shape)
conv = np.zeros((2,2,3))
liste = [(0,1,0),(0,0,1),(1,1,1),(1,0,2)]
for el in liste :
conv[el]=1
if el[2]==1:
conv[el]=1/2
for i in range (3):
op[:,:,i] = conv2d(y,conv[:,:,i], 2)
op1 = upscale(op,(input_shape[0],input_shape[1],3),2)
return op1
def quad_bayer_reconstruction(y : np.ndarray, input_shape : tuple) -> np.ndarray:
op_shape = (input_shape[0]//4,input_shape[1]//4,3)
op = np.zeros(op_shape)
conv = np.zeros((4,4,3))
liste = [(2,0,2),(3,0,2),(2,1,2),(3,1,2),(0,0,1),(1,0,1),(0,1,1),(1,1,1),(2,2,1),(2,3,1),(3,2,1),(3,3,1),(0,2,0),(0,3,0),(1,2,0),(1,3,0)]
for el in liste :
conv[el]=1/4
if el[2]==1:
conv[el]=1/8
for i in range (3):
op[:,:,i] = conv2d(y,conv[:,:,i], 4)
op1 = upscale(op,(input_shape[0],input_shape[1],3),4)
return op1
def conv2d(img : np.ndarray, conv : np.ndarray, pas :int):
y_shape = (img.shape[0], img.shape[1])
op = np.zeros((y_shape[0]//pas,y_shape[1]//pas))
for i in range(0,y_shape[0],pas):
for j in range(0,y_shape[1],pas):
op[i//pas,j//pas] = np.sum(img[i:i+pas,j:j+pas]*conv)
return op
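A tiny check of conv2d (a sketch, not part of the submission): with pas=2, each output pixel is the weighted sum of one non-overlapping 2x2 block of the input.

img = np.arange(16, dtype=float).reshape(4, 4)
ker = np.ones((2, 2))
out = conv2d(img, ker, 2)
# out[0, 0] = 0 + 1 + 4 + 5 = 10 and out[1, 1] = 10 + 11 + 14 + 15 = 50
assert out[0, 0] == 10.0 and out[1, 1] == 50.0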
import numpy as np
from scipy.signal import convolve2d
CONV2 = np.array([[1/4,1/2,1/4],[1/2,1,1/2],[1/4,1/2,1/4]])
def bayer_reconstruction(y : np.ndarray, input_shape : tuple) -> np.ndarray:
op_shape = (input_shape[0],input_shape[1],3)
op = np.zeros(op_shape)
conv = np.zeros((2,2,3))
liste = [(0,1,0),(0,0,1),(1,1,1),(1,0,2)]
for el in liste :
conv[el]=1
op = masq(y,conv, 2)
op1 = np.zeros_like(op)
for color in range(3):
op1[:,:,color] = convolve2d(op[:,:,color],CONV2,mode ="same")
if color == 1 :
op1[:,:,color] = op1[:,:,color]/2
return op1
CONV4 = np.array([[11/156,43/312,2/13,43/312,11/156],
[43/312,1/6,1/4,1/6,43/312],
[2/13,1/4,1/3,1/4,2/13],
[43/312,1/6,1/4,1/6,43/312],
[11/156,43/312,2/13,43/312,11/156]])
def quad_bayer_reconstruction(y : np.ndarray, input_shape : tuple) -> np.ndarray:
op_shape = (input_shape[0],input_shape[1],3)
op = np.zeros(op_shape)
conv = np.zeros((4,4,3))
liste = [(2,0,2),(3,0,2),(2,1,2),(3,1,2),(0,0,1),(1,0,1),(0,1,1),(1,1,1),(2,2,1),(2,3,1),(3,2,1),(3,3,1),(0,2,0),(0,3,0),(1,2,0),(1,3,0)]
for el in liste :
conv[el]=1
op = masq(y,conv, 4)
op1 = np.zeros_like(op)
for color in range(3):
op1[:,:,color] = convolve2d(op[:,:,color],CONV4,mode ="same")
if color == 1 :
op1[:,:,color] = op1[:,:,color]/2
return op1
def masq(img : np.ndarray,conv:np.ndarray, pas:int) ->np.ndarray:
masq = np.zeros((img.shape[0],img.shape[1],3))
po = np.zeros((img.shape[0],img.shape[1],3))
for color in range (3):
for i in range(0,po.shape[0],pas):
for j in range(0,po.shape[1],pas):
masq[i:i+pas,j:j+pas,color] = conv[:,:,color]
po[:,:,color] = masq[:,:,color] * img
return po
\ No newline at end of file
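A quick illustration of masq (a sketch, not part of the submission): the per-channel pattern stored in conv is tiled over the whole image, so each channel keeps only its own CFA samples.

img = np.arange(16, dtype=float).reshape(4, 4)
conv = np.zeros((2, 2, 3))
conv[0, 1, 0] = 1   # hypothetical layout: red sample at offset (0, 1) of each 2x2 block
po = masq(img, conv, 2)
# The red channel keeps the image values on even rows / odd columns only.
assert po[0, 1, 0] == img[0, 1] and po[0, 0, 0] == 0.0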
File added
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 12 07:38:51 2024
@author: etudiant
"""
import numpy as np
from scipy.signal import convolve2d
from src.forward_model import CFA
from scipy.ndimage import convolve
def malvar_demosaicing(op: CFA, y: np.ndarray) -> np.ndarray:
"""
Performs a malvar interpolation.
Args:
op (CFA): CFA operator.
y (np.ndarray): Mosaicked image.
Returns:
np.ndarray: Demosaicked image.
"""
z = op.adjoint(y)
# Defining masks
R_m = op.mask[:, :, 0]
G_m = op.mask[:, :, 1]
B_m = op.mask[:, :, 2]
GR_GB = np.array([
[0.0, 0.0, -1.0, 0.0, 0.0],
[0.0, 0.0, 2.0, 0.0, 0.0],
[-1.0, 2.0, 4.0, 2.0, -1.0],
[0.0, 0.0, 2.0, 0.0, 0.0],
[0.0, 0.0, -1.0, 0.0, 0.0],
]) / 8
Rg_RB_Bg_BR = np.array(
[
[0.0, 0.0, 0.5, 0.0, 0.0],
[0.0, -1.0, 0.0, -1.0, 0.0],
[-1.0, 4.0, 5.0, 4.0, -1.0],
[0.0, -1.0, 0.0, -1.0, 0.0],
[0.0, 0.0, 0.5, 0.0, 0.0],
]) / 8
Rg_BR_Bg_RB = np.transpose(Rg_RB_Bg_BR)
Rb_BB_Br_RR = np.array(
[
[0.0, 0.0, -1.5, 0.0, 0.0],
[0.0, 2.0, 0.0, 2.0, 0.0],
[-1.5, 0.0, 6.0, 0.0, -1.5],
[0.0, 2.0, 0.0, 2.0, 0.0],
[0.0, 0.0, -1.5, 0.0, 0.0],
]) / 8
R = y * R_m
G = y * G_m
B = y * B_m
del G_m
G = np.where(np.logical_or(R_m == 1, B_m == 1), convolve(y, GR_GB), G)
RBg_RBBR = convolve(y, Rg_RB_Bg_BR)
RBg_BRRB = convolve(y, Rg_BR_Bg_RB)
RBgr_BBRR = convolve(y, Rb_BB_Br_RR)
del GR_GB, Rg_RB_Bg_BR, Rg_BR_Bg_RB, Rb_BB_Br_RR
# Red rows.
R_r = np.transpose(np.any(R_m == 1, axis=1)[None]) * np.ones(R.shape)
# Red columns.
R_c = np.any(R_m == 1, axis=0)[None] * np.ones(R.shape)
# Blue rows.
B_r = np.transpose(np.any(B_m == 1, axis=1)[None]) * np.ones(B.shape)
# Blue columns
B_c = np.any(B_m == 1, axis=0)[None] * np.ones(B.shape)
del R_m, B_m
R = np.where(np.logical_and(R_r == 1, B_c == 1), RBg_RBBR, R)
R = np.where(np.logical_and(B_r == 1, R_c == 1), RBg_BRRB, R)
B = np.where(np.logical_and(B_r == 1, R_c == 1), RBg_RBBR, B)
B = np.where(np.logical_and(R_r == 1, B_c == 1), RBg_BRRB, B)
R = np.where(np.logical_and(B_r == 1, B_c == 1), RBgr_BBRR, R)
B = np.where(np.logical_and(R_r == 1, R_c == 1), RBgr_BBRR, B)
del RBg_RBBR, RBg_BRRB, RBgr_BBRR, R_r, R_c, B_r, B_c
# Combine channels
return np.clip(np.stack((R, G, B), axis=-1), 0, 1)
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.Samia_Bouddahab.function import malvar_demosaicing
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
# Performing the reconstruction.
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
res = malvar_demosaicing(op, y)
return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
File added
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from scipy.ndimage import generic_filter, convolve
import warnings
from tqdm import tqdm
from src.forward_model import CFA
def regression_filter_func(chans):
    """Computes the linear regression coefficients between channels:
    red and green, blue and green, green and red, green and blue.

    Returns:
        a_red, b_red, a_blue, b_blue, a_green_red, b_green_red, a_green_blue, b_green_blue,
        where a is the first-order (slope) coefficient and b the zero-order (intercept) one.
    """
red = chans[:,:,0].flatten()
green = chans[:,:,1].flatten()
blue = chans[:,:,2].flatten()
nonzeros_red = np.nonzero(red)
nonzeros_blue = np.nonzero(blue)
return np.concatenate((np.polyfit(red[nonzeros_red], green[nonzeros_red], deg=1), np.polyfit(blue[nonzeros_blue], green[nonzeros_blue], deg=1), np.polyfit(green[nonzeros_red], red[nonzeros_red], deg=1), np.polyfit(green[nonzeros_blue], blue[nonzeros_blue], deg=1)), axis=0)
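For reference, np.polyfit with deg=1 returns the slope first and the intercept second, which is why a is the high-order coefficient and b the zero-order one (a quick check, not part of the submission):

a, b = np.polyfit(np.array([0.0, 1.0, 2.0]), np.array([1.0, 3.0, 5.0]), deg=1)
# a is approximately 2.0 (slope) and b approximately 1.0 (intercept)
assert abs(a - 2.0) < 1e-6 and abs(b - 1.0) < 1e-6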
def personal_generic_filter(a, func, footprint, output, msg=""):
"""Apply func on each footprint of a.
"""
# Suppress RankWarning
with warnings.catch_warnings():
warnings.simplefilter('ignore', np.RankWarning)
for i in tqdm(range(a.shape[0]), desc=msg):
for j in range(a.shape[1]):
# Clamp the window so that it stays inside the image near the borders.
if i + footprint.shape[0] > a.shape[0]:
    i_end = a.shape[0]
    i_start = i_end - footprint.shape[0]
else:
    i_start = i
    i_end = i + footprint.shape[0]
if j + footprint.shape[1] > a.shape[1]:
    j_end = a.shape[1]
    j_start = j_end - footprint.shape[1]
else:
    j_start = j
    j_end = j + footprint.shape[1]
output[i, j, :] = func(a[i_start:i_end, j_start:j_end])
def combine_directions(bh,bv):
combo = np.zeros(bh.shape)
combo[:,:,0] = bh[:,:,0] + bv[:,:,0]
combo[:,:,0][(bh[:,:,0]!=0) * (bv[:,:,0]!=0)] = (bh[:,:,0][(bh[:,:,0]!=0) * (bv[:,:,0]!=0)] + bv[:,:,0][(bh[:,:,0]!=0) * (bv[:,:,0]!=0)])/2
combo[:,:,0][(bh[:,:,0]==0) * (bv[:,:,0]==0)] = 0
combo[:,:,1] = bh[:,:,1]/2 + bv[:,:,1]/2
combo[:,:,2] = bh[:,:,2] + bv[:,:,2]
combo[:,:,2][(bh[:,:,2]!=0) * (bv[:,:,2]!=0)] = (bh[:,:,2][(bh[:,:,2]!=0) * (bv[:,:,2]!=0)] + bv[:,:,2][(bh[:,:,2]!=0) * (bv[:,:,2]!=0)])/2
combo[:,:,2][(bh[:,:,2]==0) * (bv[:,:,2]==0)] = 0
for i in range(combo.shape[0]):
for j in range(combo.shape[1]):
moy = []
if i != 0 :
moy.append(combo[i-1,j,:])
if i != combo.shape[0]-1 :
moy.append(combo[i+1,j,:])
if j != 0 :
moy.append(combo[i,j-1,:])
if j != combo.shape[1]-1 :
moy.append(combo[i,j+1,:])
moy = np.stack(moy).mean(axis=0)
if combo[i,j,0] == 0:
combo[i,j,0] = moy[0]
if combo[i,j,2] == 0:
combo[i,j,2] = moy[2]
return combo
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
# Performing the reconstruction.
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
adjoint = op.adjoint(y)
bh = np.zeros(adjoint.shape)
# Horizontal interpolation
for i in range(3):
for j in range(adjoint.shape[0]):
non_zero = np.nonzero(adjoint[j, :, i])[0]
if len(non_zero) == 0:
continue
bh[j, :, i] = np.interp(np.arange(adjoint.shape[1]), non_zero, adjoint[j, :, i][non_zero])
bv = np.zeros(adjoint.shape)
# Vertical interpolation
for i in range(3):
for j in range(adjoint.shape[1]):
non_zero = np.nonzero(adjoint[:, j, i])[0]
if len(non_zero) == 0:
continue
bv[:, j, i] = np.interp(np.arange(adjoint.shape[0]), non_zero, adjoint[:, j, i][non_zero])
# Residuals interpolation
M = 3
N = 5
kernel_hor = np.ones((M, N))/(M*N)
kernel_ver = np.ones((N, M))/(M*N)
# Horizontal Regression filtering
regh = np.zeros((bh.shape[0], bh.shape[1], 8))
personal_generic_filter(bh, regression_filter_func, kernel_hor, regh,msg="Regression filtering horizontal")
ch = np.zeros(bh.shape)
for i in range(regh.shape[-1]):
regh[:, :, i] = convolve(regh[:, :, i], kernel_hor)
ch[:,:,0] = regh[:,:,0]*bh[:,:,0] + regh[:,:,1]
ch[:,:,1] = (regh[:,:,4]*bh[:,:,1] + regh[:,:,5] + regh[:,:,6]*bh[:,:,1] + regh[:,:,7])/2
ch[:,:,2] = regh[:,:,2]*bh[:,:,2] + regh[:,:,3]
# return ch[:,:,1]
dh = ch - adjoint
dh[adjoint==0] = 0
# interpolation
for i in range(3):
for j in range(adjoint.shape[0]):
non_zero = np.nonzero(dh[j, :, i])[0]
if len(non_zero) == 0:
continue
ch[j, :, i] -= np.interp(np.arange(adjoint.shape[1]), non_zero, dh[j, :, i][non_zero])
ch[bh == 0] = 0
bh = ch.copy()
# return bh[:,:,0]
# Vertical Regression filtering
regv = np.zeros((bv.shape[0], bv.shape[1], 8))
personal_generic_filter(bv, regression_filter_func, kernel_ver, regv,msg="Regression filtering vertical")
cv = np.zeros(bv.shape)
for i in range(regv.shape[-1]):
regv[:, :, i] = convolve(regv[:, :, i], kernel_ver)
cv[:,:,0] = regv[:,:,0]*bv[:,:,0] + regv[:,:,1]
cv[:,:,1] = (regv[:,:,4]*bv[:,:,1] + regv[:,:,5] + regv[:,:,6]*bv[:,:,1] + regv[:,:,7])/2
cv[:,:,2] = regv[:,:,2]*bv[:,:,2] + regv[:,:,3]
dv = cv - adjoint
dv[adjoint==0] = 0
# interpolation
for i in range(3):
for j in range(adjoint.shape[1]):
non_zero = np.nonzero(dv[:, j, i])[0]
if len(non_zero) == 0:
continue
cv[:, j, i] -= np.interp(np.arange(adjoint.shape[0]), non_zero, dv[:, j, i][non_zero])
cv[bv == 0] = 0
bv = cv.copy()
return combine_directions(bh,bv)
if __name__ == "__main__":
from src.utils import load_image, save_image, psnr, ssim
from src.forward_model import CFA
image_path = 'images/img_4.png'
img = load_image(image_path)
cfa_name = 'bayer' # bayer or quad_bayer
op = CFA(cfa_name, img.shape)
y = op.direct(img)
res = run_reconstruction(y, cfa_name)
print('PSNR:', psnr(img, res))
print('SSIM:', ssim(img, res))
save_image('output/bh.png', res)
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
File added