Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (114)
Showing with 737 additions and 0 deletions
File mode changed from 100755 to 100644
File added
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.Chardon_tom.utils import *
import pywt
#!!!!!!!! It is normal that the reconstructions lasts several minutes (3min on my computer)
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
# Define constants and operators
cfa_name = 'bayer' # bayer or quad_bayer
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa_name, input_shape)
res = op.adjoint(y)
N,M = input_shape[0], input_shape[1]
#interpolating green channel
for i in range (N):
for j in range (M):
if res[i,j,1] ==0:
neighbors = get_neighbors(res,1,i,j,N,M)
weights = get_weights(res,i,j,1,N,M)
res[i,j,1] = interpolate_green(weights, neighbors)
#first intepolation of red channel
for i in range (1,N,2):
for j in range (0,M,2):
neighbors = get_neighbors(res,0,i,j,N,M)
neighbors_G = get_neighbors(res,1,i,j,N,M)
weights = get_weights(res,i,j,0,N,M)
res[i,j,0] = interpolate_red_blue(weights,neighbors, neighbors_G)
# second interpolation of red channel
for i in range (N):
for j in range (M):
if res[i,j,0] ==0:
neighbors = get_neighbors(res,0,i,j,N,M)
weights = get_weights(res,i,j,0,N,M)
res[i,j,0] = interpolate_green(weights, neighbors)
#first interpolation of blue channel
for i in range (0,N,2):
for j in range (1,M,2):
neighbors = get_neighbors(res,2,i,j,N,M)
neighbors_G = get_neighbors(res,1,i,j,N,M)
weights = get_weights(res,i,j,2,N,M)
res[i,j,2] = interpolate_red_blue(weights, neighbors, neighbors_G)
#second interpolation of blue channel
for i in range (N):
for j in range (M):
if res[i,j,2] ==0:
neighbors = get_neighbors(res,2,i,j,N,M)
weights = get_weights(res,i,j,2,N,M)
res[i,j,2] = interpolate_green(weights,neighbors)
# k=0
# while k<2 :
# for i in range(input_shape[0]):
# for j in range(input_shape[1]):
# res[i][j][1] = correction_green(res,i,j,N,M)
# for i in range(input_shape[0]):
# for j in range(input_shape[1]):
# res[i][j][0] = correction_red(res,i,j,N,M)
# for i in range(input_shape[0]):
# for j in range(input_shape[1]):
# res[i][j][2] = correction_blue(res,i,j,N,M)
# k+=1
res[res>1] = 1
res[res<0] = 0
return res
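
For context, a minimal usage sketch of this entry point follows. It is illustrative only: the module path src.methods.Chardon_tom.reconstruct and the mosaicking method named direct on the CFA operator are assumptions, neither is shown in this diff.

import numpy as np
from src.forward_model import CFA
from src.methods.Chardon_tom.reconstruct import run_reconstruction  # hypothetical module path

rgb = np.random.rand(64, 64, 3)          # stand-in test image with values in [0, 1]
op = CFA('bayer', rgb.shape)
y = op.direct(rgb)                       # mosaicked observation (assumes a direct method exists)
x_hat = run_reconstruction(y, 'bayer')   # demosaicked estimate, clipped to [0, 1]
print(x_hat.shape)                       # (64, 64, 3)
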
import numpy as np


def get_neighbors(img, channel, i, j, N, M):
    """Returns the 3x3 neighborhood of pixel (i, j) in the given channel, wrapping around the borders."""
    P1 = img[(i - 1) % N, (j - 1) % M, channel]
    P2 = img[(i - 1) % N, j % M, channel]
    P3 = img[(i - 1) % N, (j + 1) % M, channel]
    P4 = img[i % N, (j - 1) % M, channel]
    P5 = img[i % N, j % M, channel]
    P6 = img[i % N, (j + 1) % M, channel]
    P7 = img[(i + 1) % N, (j - 1) % M, channel]
    P8 = img[(i + 1) % N, j % M, channel]
    P9 = img[(i + 1) % N, (j + 1) % M, channel]
    return np.array([P1, P2, P3, P4, P5, P6, P7, P8, P9])


def get_derivatives(neighbors):
    """Returns the horizontal, vertical and diagonal derivatives at the center of a 3x3 neighborhood."""
    [P1, P2, P3, P4, P5, P6, P7, P8, P9] = neighbors
    D_x = (P4 - P6) / 2
    D_y = (P2 - P8) / 2
    D_xd = (P3 - P7) / (2 * np.sqrt(2))
    D_yd = (P1 - P9) / (2 * np.sqrt(2))
    return [D_x, D_y, D_xd, D_yd]


def get_weights(mosaic_image, i, j, channel, N, M):
    """Computes the edge-sensing weights of the 8 neighbors of pixel (i, j)."""
    derivatives_neighbors = []
    for di in range(-1, 2):
        for dj in range(-1, 2):
            derivatives_neighbors.append(get_derivatives(
                get_neighbors(mosaic_image, channel, i + di, j + dj, N, M)))
    [Dx, Dy, Dxd, Dyd] = derivatives_neighbors[4]
    E1 = 1 / np.sqrt(1 + Dyd**2 + derivatives_neighbors[0][3]**2)
    E2 = 1 / np.sqrt(1 + Dy**2 + derivatives_neighbors[1][1]**2)
    E3 = 1 / np.sqrt(1 + Dxd**2 + derivatives_neighbors[2][2]**2)
    E4 = 1 / np.sqrt(1 + Dx**2 + derivatives_neighbors[3][0]**2)
    E6 = 1 / np.sqrt(1 + Dxd**2 + derivatives_neighbors[5][2]**2)
    E7 = 1 / np.sqrt(1 + Dy**2 + derivatives_neighbors[6][1]**2)
    E8 = 1 / np.sqrt(1 + Dyd**2 + derivatives_neighbors[7][3]**2)
    E9 = 1 / np.sqrt(1 + Dx**2 + derivatives_neighbors[8][0]**2)
    return [E1, E2, E3, E4, E6, E7, E8, E9]


def interpolate_green(weights, neighbors):
    """Weighted average of the 4 direct neighbors (used for green and for the second R/B pass)."""
    [E1, E2, E3, E4, E6, E7, E8, E9] = weights
    [P1, P2, P3, P4, P5, P6, P7, P8, P9] = neighbors
    I5 = (E2 * P2 + E4 * P4 + E6 * P6 + E8 * P8) / (E2 + E4 + E6 + E8)
    return I5


def interpolate_red_blue(weights, neighbors, green_neighbors):
    """Weighted average of the 4 diagonal neighbors, normalized by the green channel (color-ratio interpolation)."""
    [E1, E2, E3, E4, E6, E7, E8, E9] = weights
    [P1, P2, P3, P4, P5, P6, P7, P8, P9] = neighbors
    [G1, G2, G3, G4, G5, G6, G7, G8, G9] = green_neighbors
    I5 = G5 * (E1 * P1 / G1 + E3 * P3 / G3 + E7 * P7 / G7 + E9 * P9 / G9) / (E1 + E3 + E7 + E9)
    return I5


def correction_green(res, i, j, N, M):
    """Correction step for the green channel (unused by default)."""
    [G1, G2, G3, G4, G5, G6, G7, G8, G9] = get_neighbors(res, 1, i, j, N, M)
    [R1, R2, R3, R4, R5, R6, R7, R8, R9] = get_neighbors(res, 0, i, j, N, M)
    [B1, B2, B3, B4, B5, B6, B7, B8, B9] = get_neighbors(res, 2, i, j, N, M)
    [E1, E2, E3, E4, E6, E7, E8, E9] = get_weights(res, i, j, 1, N, M)
    Gb5 = R5 * ((E2 * G2) / B2 + (E4 * G4) / B4 + (E6 * G6) / B6 + (E8 * G8) / B8) / (E2 + E4 + E6 + E8)
    Gr5 = B5 * ((E2 * G2) / R2 + (E4 * G4) / R4 + (E6 * G6) / R6 + (E8 * G8) / R8) / (E2 + E4 + E6 + E8)
    return (Gb5 + Gr5) / 2


def correction_red(res, i, j, N, M):
    """Correction step for the red channel (unused by default)."""
    [G1, G2, G3, G4, G5, G6, G7, G8, G9] = get_neighbors(res, 1, i, j, N, M)
    [R1, R2, R3, R4, R5, R6, R7, R8, R9] = get_neighbors(res, 0, i, j, N, M)
    [E1, E2, E3, E4, E6, E7, E8, E9] = get_weights(res, i, j, 0, N, M)
    R5 = G5 * ((E1 * R1) / G1 + (E2 * R2) / G2 + (E3 * R3) / G3 + (E4 * R4) / G4
               + (E6 * R6) / G6 + (E7 * R7) / G7 + (E8 * R8) / G8 + (E9 * R9) / G9) / (E1 + E2 + E3 + E4 + E6 + E7 + E8 + E9)
    return R5


def correction_blue(res, i, j, N, M):
    """Correction step for the blue channel (unused by default)."""
    [G1, G2, G3, G4, G5, G6, G7, G8, G9] = get_neighbors(res, 1, i, j, N, M)
    [B1, B2, B3, B4, B5, B6, B7, B8, B9] = get_neighbors(res, 2, i, j, N, M)
    [E1, E2, E3, E4, E6, E7, E8, E9] = get_weights(res, i, j, 2, N, M)
    B5 = G5 * ((E1 * B1) / G1 + (E2 * B2) / G2 + (E3 * B3) / G3 + (E4 * B4) / G4
               + (E6 * B6) / G6 + (E7 * B7) / G7 + (E8 * B8) / G8 + (E9 * B9) / G9) / (E1 + E2 + E3 + E4 + E6 + E7 + E8 + E9)
    return B5
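
As a quick illustration of how these helpers combine at a single pixel, here is a minimal sketch; it assumes the functions above are in scope (for instance imported from src.methods.Chardon_tom.utils) and uses a toy checkerboard green channel.

import numpy as np

N, M = 8, 8
res = np.zeros((N, M, 3))
res[:, :, 1] = np.tile([[0.0, 0.6], [0.6, 0.0]], (N // 2, M // 2))  # toy green mosaic

i, j = 2, 2                                       # a site where green is missing
neighbors = get_neighbors(res, 1, i, j, N, M)     # 3x3 green neighborhood
weights = get_weights(res, i, j, 1, N, M)         # edge-sensing weights
print(interpolate_green(weights, neighbors))      # weighted mean of the 4 direct neighbors (0.6 here)
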
File added
import numpy as np
from scipy.signal import correlate2d

from src.forward_model import CFA


def malvar_he_cutler(y: np.ndarray, op: CFA) -> np.ndarray:
    """Performs demosaicking using the Malvar-He-Cutler algorithm.

    Args:
        y (np.ndarray): Mosaicked image.
        op (CFA): CFA operator.

    Returns:
        np.ndarray: Demosaicked image.
    """
    red_mask, green_mask, blue_mask = op.mask[:, :, 0], op.mask[:, :, 1], op.mask[:, :, 2]
    mosaicked_image = np.float32(y)
    demosaicked_image = np.empty(op.input_shape)

    # Pick the filter bank matching the CFA layout.
    if op.cfa == 'quad_bayer':
        filters = get_quad_bayer_filters()
    else:
        filters = get_default_filters()

    demosaicked_image = apply_demosaicking_filters(
        mosaicked_image, demosaicked_image, red_mask, green_mask, blue_mask, filters
    )
    return demosaicked_image
def get_quad_bayer_filters():
    coefficient_scale = 0.03125
    return {
        "G_at_R_and_B": np.array([
            [0, 0, 0, 0, -1, -1, 0, 0, 0, 0],
            [0, 0, 0, 0, -1, -1, 0, 0, 0, 0],
            [0, 0, 0, 0, 2, 2, 0, 0, 0, 0],
            [0, 0, 0, 0, 2, 2, 0, 0, 0, 0],
            [-1, -1, 2, 2, 4, 4, 2, 2, -1, -1],
            [-1, -1, 2, 2, 4, 4, 2, 2, -1, -1],
            [0, 0, 0, 0, 2, 2, 0, 0, 0, 0],
            [0, 0, 0, 0, 2, 2, 0, 0, 0, 0],
            [0, 0, 0, 0, -1, -1, 0, 0, 0, 0],
            [0, 0, 0, 0, -1, -1, 0, 0, 0, 0]
        ]) * coefficient_scale,
        "R_at_GR_and_B_at_GB": np.array([
            [0, 0, 0, 0, 0.5, 0.5, 0, 0, 0, 0],
            [0, 0, 0, 0, 0.5, 0.5, 0, 0, 0, 0],
            [0, 0, -1, -1, 0, 0, -1, -1, 0, 0],
            [0, 0, -1, -1, 0, 0, -1, -1, 0, 0],
            [-1, -1, 4, 4, 5, 5, 4, 4, -1, -1],
            [-1, -1, 4, 4, 5, 5, 4, 4, -1, -1],
            [0, 0, -1, -1, 0, 0, -1, -1, 0, 0],
            [0, 0, -1, -1, 0, 0, -1, -1, 0, 0],
            [0, 0, 0, 0, 0.5, 0.5, 0, 0, 0, 0],
            [0, 0, 0, 0, 0.5, 0.5, 0, 0, 0, 0]
        ]) * coefficient_scale,
        "R_at_GB_and_B_at_GR": np.array([
            [0, 0, 0, 0, -1, -1, 0, 0, 0, 0],
            [0, 0, 0, 0, -1, -1, 0, 0, 0, 0],
            [0, 0, -1, -1, 4, 4, -1, -1, 0, 0],
            [0, 0, -1, -1, 4, 4, -1, -1, 0, 0],
            [0.5, 0.5, 0, 0, 5, 5, 0, 0, 0.5, 0.5],
            [0.5, 0.5, 0, 0, 5, 5, 0, 0, 0.5, 0.5],
            [0, 0, -1, -1, 4, 4, -1, -1, 0, 0],
            [0, 0, -1, -1, 4, 4, -1, -1, 0, 0],
            [0, 0, 0, 0, -1, -1, 0, 0, 0, 0],
            [0, 0, 0, 0, -1, -1, 0, 0, 0, 0]
        ]) * coefficient_scale,
        "R_at_B_and_B_at_R": np.array([
            [0, 0, 0, 0, -1.5, -1.5, 0, 0, 0, 0],
            [0, 0, 0, 0, -1.5, -1.5, 0, 0, 0, 0],
            [0, 0, 2, 2, 0, 0, 2, 2, 0, 0],
            [0, 0, 2, 2, 0, 0, 2, 2, 0, 0],
            [-1.5, -1.5, 0, 0, 6, 6, 0, 0, -1.5, -1.5],
            [-1.5, -1.5, 0, 0, 6, 6, 0, 0, -1.5, -1.5],
            [0, 0, 2, 2, 0, 0, 2, 2, 0, 0],
            [0, 0, 2, 2, 0, 0, 2, 2, 0, 0],
            [0, 0, 0, 0, -1.5, -1.5, 0, 0, 0, 0],
            [0, 0, 0, 0, -1.5, -1.5, 0, 0, 0, 0]
        ]) * coefficient_scale,
    }
def get_default_filters():
    coefficient_scale = 0.125
    return {
        "G_at_R_and_B": np.array([
            [0, 0, -1, 0, 0],
            [0, 0, 2, 0, 0],
            [-1, 2, 4, 2, -1],
            [0, 0, 2, 0, 0],
            [0, 0, -1, 0, 0]
        ]) * coefficient_scale,
        "R_at_GR_and_B_at_GB": np.array([
            [0, 0, 0.5, 0, 0],
            [0, -1, 0, -1, 0],
            [-1, 4, 5, 4, -1],
            [0, -1, 0, -1, 0],
            [0, 0, 0.5, 0, 0]
        ]) * coefficient_scale,
        "R_at_GB_and_B_at_GR": np.array([
            [0, 0, -1, 0, 0],
            [0, -1, 4, -1, 0],
            [0.5, 0, 5, 0, 0.5],
            [0, -1, 4, -1, 0],
            [0, 0, -1, 0, 0]
        ]) * coefficient_scale,
        "R_at_B_and_B_at_R": np.array([
            [0, 0, -1.5, 0, 0],
            [0, 2, 0, 2, 0],
            [-1.5, 0, 6, 0, -1.5],
            [0, 2, 0, 2, 0],
            [0, 0, -1.5, 0, 0]
        ]) * coefficient_scale,
    }
def apply_demosaicking_filters(image, res, red_mask, green_mask, blue_mask, filters):
    # Start from the observed samples of each channel.
    red_channel = image * red_mask
    green_channel = image * green_mask
    blue_channel = image * blue_mask

    # Estimate green at every red and blue location.
    green_channel = np.where(
        np.logical_or(red_mask == 1, blue_mask == 1),
        correlate2d(image, filters['G_at_R_and_B'], mode="same", boundary="symm"),
        green_channel
    )

    # Row/column masks used to identify the different kinds of sampling sites.
    red_row_mask = np.any(red_mask == 1, axis=1)[:, np.newaxis].astype(np.float32)
    red_col_mask = np.any(red_mask == 1, axis=0)[np.newaxis].astype(np.float32)
    blue_row_mask = np.any(blue_mask == 1, axis=1)[:, np.newaxis].astype(np.float32)
    blue_col_mask = np.any(blue_mask == 1, axis=0)[np.newaxis].astype(np.float32)

    def update_channel(channel, row_mask, col_mask, filter_key):
        return np.where(
            np.logical_and(row_mask == 1, col_mask == 1),
            correlate2d(image, filters[filter_key], mode="same", boundary="symm"),
            channel
        )

    # Estimate red and blue at green locations.
    red_channel = update_channel(red_channel, red_row_mask, blue_col_mask, 'R_at_GR_and_B_at_GB')
    red_channel = update_channel(red_channel, blue_row_mask, red_col_mask, 'R_at_GB_and_B_at_GR')
    blue_channel = update_channel(blue_channel, blue_row_mask, red_col_mask, 'R_at_GR_and_B_at_GB')
    blue_channel = update_channel(blue_channel, red_row_mask, blue_col_mask, 'R_at_GB_and_B_at_GR')

    # Estimate red at blue locations and blue at red locations.
    red_channel = update_channel(red_channel, blue_row_mask, blue_col_mask, 'R_at_B_and_B_at_R')
    blue_channel = update_channel(blue_channel, red_row_mask, red_col_mask, 'R_at_B_and_B_at_R')

    res[:, :, 0] = red_channel
    res[:, :, 1] = green_channel
    res[:, :, 2] = blue_channel
    return res
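
A small sanity check on the two filter banks above (illustrative; it assumes get_default_filters and get_quad_bayer_filters are in scope): each quad-Bayer kernel is the corresponding 5x5 Malvar kernel expanded by a factor of 2 with np.kron, with the coefficient scale divided by 4 (0.125 / 4 = 0.03125) so the overall gain is unchanged.

import numpy as np

default_filters = get_default_filters()
quad_filters = get_quad_bayer_filters()
for key, kernel in default_filters.items():
    expanded = np.kron(kernel, np.ones((2, 2))) / 4   # 5x5 -> 10x10, same gain
    assert np.allclose(expanded, quad_filters[key]), key
print("quad-Bayer kernels match the 2x-expanded 5x5 kernels")
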
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.EL_MURR_Theresa.malvar import malvar_he_cutler
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
# Performing the reconstruction.
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
res = malvar_he_cutler(y,op)
return res
####
####
####
#### #### #### #############
#### ###### #### ##################
#### ######## #### ####################
#### ########## #### #### ########
#### ############ #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ## ###### #### #### ######
#### #### #### ## #### #### ############
#### #### ###### #### #### ##########
#### #### ########## #### #### ########
#### #### ######## #### ####
#### #### ############ ####
#### #### ########## ####
#### #### ######## ####
#### #### ###### ####
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
File added
import numpy as np
from scipy.signal import convolve2d

from src.forward_model import CFA


def malvar(y: np.ndarray, op: CFA) -> np.ndarray:
    """Malvar-He-Cutler demosaicking algorithm for the Bayer pattern."""
    # Initial channel estimates from the mosaicked image.
    z = op.adjoint(y)

    # Convolution kernels, normalized by 8.
    kernel_G_at_RB = np.array([[0, 0, -1, 0, 0],
                               [0, 0, 2, 0, 0],
                               [-1, 2, 4, 2, -1],
                               [0, 0, 2, 0, 0],
                               [0, 0, -1, 0, 0]]) / 8
    kernel_RB_at_G = np.array([[0, 0, 0.5, 0, 0],
                               [0, -1, 0, -1, 0],
                               [-1, 4, 5, 4, -1],
                               [0, -1, 0, -1, 0],
                               [0, 0, 0.5, 0, 0]]) / 8
    kernel_RB_at_RB = np.array([[0, 0, -1.5, 0, 0],
                                [0, 2, 0, 2, 0],
                                [-1.5, 0, 6, 0, -1.5],
                                [0, 2, 0, 2, 0],
                                [0, 0, -1.5, 0, 0]]) / 8

    R = z[:, :, 0]
    G = z[:, :, 1]
    B = z[:, :, 2]
    mask = op.mask
    R_m, G_m, B_m = mask[:, :, 0], mask[:, :, 1], mask[:, :, 2]

    # Interpolate G at R and B locations.
    G = np.where(np.logical_or(R_m == 1, B_m == 1), convolve2d(y, kernel_G_at_RB, mode='same'), G)
    # Interpolate R at G and B locations, B at R and G locations.
    R = np.where(np.logical_or(G_m == 1, B_m == 1), convolve2d(y, kernel_RB_at_G, mode='same'), R)
    B = np.where(np.logical_or(R_m == 1, G_m == 1), convolve2d(y, kernel_RB_at_G, mode='same'), B)
    # Interpolate R at B locations and B at R locations.
    R = np.where(B_m == 1, convolve2d(y, kernel_RB_at_RB, mode='same'), R)
    B = np.where(R_m == 1, convolve2d(y, kernel_RB_at_RB, mode='same'), B)

    # Combine channels and clip to [0, 1].
    return np.clip(np.stack((R, G, B), axis=-1), 0, 1)
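
As a quick check of the normalization used above, every Malvar-He-Cutler kernel sums to 1 after the division by 8, so constant regions keep their intensity. The snippet below is illustrative only and re-declares one kernel so that it is self-contained.

import numpy as np

kernel_G_at_RB = np.array([[0, 0, -1, 0, 0],
                           [0, 0, 2, 0, 0],
                           [-1, 2, 4, 2, -1],
                           [0, 0, 2, 0, 0],
                           [0, 0, -1, 0, 0]]) / 8
print(kernel_G_at_RB.sum())  # 1.0 -> unit DC gain on flat areas
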
####
####
####
#### #### #### #############
#### ###### #### ##################
#### ######## #### ####################
#### ########## #### #### ########
#### ############ #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ## ###### #### #### ######
#### #### #### ## #### #### ############
#### #### ###### #### #### ##########
#### #### ########## #### #### ########
#### #### ######## #### ####
#### #### ############ ####
#### #### ########## ####
#### #### ######## ####
#### #### ###### ####
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
import numpy as np
from src.methods.Elmehdi_lahmar.malvar import malvar
from src.forward_model import CFA
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""
Run the demosaicing process using Malvar-He-Cutler algorithm for Bayer pattern.
Args:
y (np.ndarray): The mosaicked image to be reconstructed.
cfa (str): Name of the CFA, expected to be 'bayer'.
Returns:
np.ndarray: The demosaicked image.
"""
if cfa != 'bayer':
raise ValueError("Malvar-He-Cutler demosaicing only supports Bayer CFA pattern.")
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
return malvar(y, op)
####
####
####
#### #### #### #############
#### ###### #### ##################
#### ######## #### ####################
#### ########## #### #### ########
#### ############ #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ## ###### #### #### ######
#### #### #### ## #### #### ############
#### #### ###### #### #### ##########
#### #### ########## #### #### ########
#### #### ######## #### ####
#### #### ############ ####
#### #### ########## ####
#### #### ######## ####
#### #### ###### ####
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
# Importing libraries
import colour
from colour_demosaicing import (
    demosaicing_CFA_Bayer_bilinear,
    demosaicing_CFA_Bayer_Malvar2004,
    demosaicing_CFA_Bayer_Menon2007,
    mosaicing_CFA_Bayer)

from src.utils import psnr, ssim

# Image paths
image_paths = ['images/img_1.png', 'images/img_2.png', 'images/img_3.png', 'images/img_4.png']

for image_path in image_paths:
    LIGHTHOUSE_IMAGE = colour.io.read_image(image_path)
    img = LIGHTHOUSE_IMAGE
    colour.plotting.plot_image(
        colour.cctf_encoding(LIGHTHOUSE_IMAGE))

    # Mosaicing
    CFA = mosaicing_CFA_Bayer(LIGHTHOUSE_IMAGE)
    colour.plotting.plot_image(
        colour.cctf_encoding(CFA),
        text_kwargs={'text': 'Lighthouse - CFA - RGGB'})
    colour.plotting.plot_image(
        colour.cctf_encoding(mosaicing_CFA_Bayer(LIGHTHOUSE_IMAGE, 'BGGR')),
        text_kwargs={'text': 'Lighthouse - CFA - BGGR'})

    # Demosaicing - bilinear
    recons_bilinear = colour.cctf_encoding(demosaicing_CFA_Bayer_bilinear(CFA))
    colour.plotting.plot_image(
        recons_bilinear,
        text_kwargs={'text': 'Demosaicing - Bilinear'})

    # Demosaicing - Malvar (2004)
    recons_malvar = colour.cctf_encoding(demosaicing_CFA_Bayer_Malvar2004(CFA))
    colour.plotting.plot_image(
        recons_malvar,
        text_kwargs={'text': 'Demosaicing - Malvar (2004)'})

    # Demosaicing - Menon (2007)
    recons_menon = colour.cctf_encoding(demosaicing_CFA_Bayer_Menon2007(CFA))
    colour.plotting.plot_image(
        recons_menon,
        text_kwargs={'text': 'Demosaicing - Menon (2007)'})

    print('Bilinear:')
    print(f'PSNR: {psnr(img, recons_bilinear):.2f}')
    print(f'SSIM: {ssim(img, recons_bilinear):.4f}')
    print('Malvar:')
    print(f'PSNR: {psnr(img, recons_malvar):.2f}')
    print(f'SSIM: {ssim(img, recons_malvar):.4f}')
    print('Menon:')
    print(f'PSNR: {psnr(img, recons_menon):.2f}')
    print(f'SSIM: {ssim(img, recons_menon):.4f}')
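
For reference, a common definition of PSNR on images with values in [0, 1] is sketched below; the project's src.utils.psnr may differ, so this stand-in is only illustrative.

import numpy as np

def psnr_reference(x: np.ndarray, y: np.ndarray, peak: float = 1.0) -> float:
    # Peak signal-to-noise ratio in dB for images with values in [0, peak].
    mse = np.mean((x - y) ** 2)
    return 10 * np.log10(peak ** 2 / mse)
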
File added
File added
import numpy as np


def clip_values(img: np.ndarray) -> np.ndarray:
    """Clips the values of an image between 0 and 1.

    Args:
        img (np.ndarray): Image to clip.

    Returns:
        np.ndarray: Clipped image.
    """
    img[img < 0] = 0
    img[img > 1] = 1
    return img
import numpy as np


def find_direct_neighbors(neighbors: list) -> list:
    """
    Calculates the gradients in the horizontal, vertical, and diagonal directions for a pixel
    based on its neighboring pixels.

    Args:
        neighbors (list): The 3x3 neighborhood of a pixel, in the row-major order produced by
            find_neighbors: [top-left, top, top-right, left, center, right,
            bottom-left, bottom, bottom-right].

    Returns:
        list: The gradients [horizontal, vertical, diagonal-x, diagonal-y].
    """
    # Horizontal gradient: half the difference between the left and right neighbors.
    horizontal_gradient = (neighbors[3] - neighbors[5]) / 2
    # Vertical gradient: half the difference between the top and bottom neighbors.
    vertical_gradient = (neighbors[1] - neighbors[7]) / 2
    # Diagonal gradient along x: difference between the top-right and bottom-left neighbors.
    diagonal_gradient_x = (neighbors[2] - neighbors[6]) / (2 * np.sqrt(2))
    # Diagonal gradient along y: difference between the top-left and bottom-right neighbors.
    diagonal_gradient_y = (neighbors[0] - neighbors[8]) / (2 * np.sqrt(2))
    return [horizontal_gradient, vertical_gradient, diagonal_gradient_x, diagonal_gradient_y]
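
A quick illustrative check, assuming the row-major neighbor ordering produced by find_neighbors below: on a purely horizontal ramp the vertical gradient is zero.

ramp = [0.0, 1.0, 2.0,
        0.0, 1.0, 2.0,
        0.0, 1.0, 2.0]
print(find_direct_neighbors(ramp))
# [-1.0, 0.0, 0.7071..., -0.7071...]  (horizontal, vertical, diagonal-x, diagonal-y)
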
import numpy as np


def find_neighbors(img: np.ndarray, channel: int, i: int, j: int, N: int, M: int) -> np.ndarray:
    """
    Retrieves the 3x3 neighborhood of a pixel in a specified channel of an image.
    The function wraps around the image edges to handle border pixels.

    Args:
        img (np.ndarray): The image to process.
        channel (int): The index of the channel to process.
        i (int): The row index of the pixel.
        j (int): The column index of the pixel.
        N (int): Height of the image.
        M (int): Width of the image.

    Returns:
        np.ndarray: The neighbor pixel values in the specified channel, in row-major order.
    """
    neighbors = []
    for di in [-1, 0, 1]:
        for dj in [-1, 0, 1]:
            neighbor_i = (i + di) % N
            neighbor_j = (j + dj) % M
            neighbors.append(img[neighbor_i, neighbor_j, channel])
    return np.array(neighbors)
import numpy as np

from .find_neighbors import find_neighbors
from .find_direct_neighbors import find_direct_neighbors


def find_weights(img: np.ndarray, direct_neighbors: list, channel: int, i: int, j: int, N: int, M: int) -> list:
    """
    Finds the weights of the neighbors of a pixel in the image.

    Args:
        img (np.ndarray): The image to process.
        direct_neighbors (list): The gradients [Dx, Dy, Dxx, Dyy] at the pixel.
        channel (int): The index of the channel to process.
        i (int): The row index of the pixel.
        j (int): The column index of the pixel.
        N (int): Height of the image.
        M (int): Width of the image.

    Returns:
        list: The list of weights of the neighbors of the pixel.
    """
    [Dx, Dy, Dxx, Dyy] = direct_neighbors
    E = []
    c = 1
    for k in [-1, 0, 1]:
        for l in [-1, 0, 1]:
            # Gradients of the neighboring pixel at offset (k, l).
            n = find_neighbors(img, channel, i + k, j + l, N, M)
            dd = find_direct_neighbors(n)
            # Edge-sensing weight 1 / sqrt(1 + D^2 + d^2) for the first four positions;
            # the remaining positions currently receive a default weight of 1.
            sqrt_arguments = {
                1: 1 + Dyy ** 2 + dd[3] ** 2,
                2: 1 + Dy ** 2 + dd[1] ** 2,
                3: 1 + Dxx ** 2 + dd[2] ** 2,
                4: 1 + Dx ** 2 + dd[0] ** 2
            }
            value = sqrt_arguments.get(c, 1)
            if value < 0:
                E.append(0)
            else:
                E.append(1 / np.sqrt(value))
            c += 1
    return E
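
To show how these three helpers fit together, here is a minimal sketch; it assumes find_neighbors, find_direct_neighbors and find_weights are in scope (for instance imported from the package that contains the files above).

import numpy as np

N, M = 8, 8
img = np.zeros((N, M, 3))
img[:, :, 1] = np.random.rand(N, M)                       # toy green channel

i, j = 3, 3
neighbors = find_neighbors(img, 1, i, j, N, M)            # 3x3 neighborhood (wrapping at borders)
gradients = find_direct_neighbors(neighbors)              # [horizontal, vertical, diag-x, diag-y]
weights = find_weights(img, gradients, 1, i, j, N, M)     # 9 edge-sensing weights
print(len(weights), weights[:2])
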