import numpy as np
import matplotlib.pyplot as plt
from src.checks import check_cfa, check_rgb
from src.forward_model import CFA
import src.methods.Samanos_Thomas.version1 as v1
import src.methods.Samanos_Thomas.version2 as v2
from scipy import ndimage
def cfa_reconstruction(y: np.ndarray, cfa: str, input_shape: tuple) -> np.ndarray:
    """Performs demosaicking on y using the version 2 reconstruction.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.
        input_shape (tuple): Shape of the output image (H, W, 3).

    Returns:
        np.ndarray: Demosaicked image.
    """
    if cfa == "bayer":
        op = v2.bayer_reconstruction(y, input_shape)
    elif cfa == "quad_bayer":
        op = v2.quad_bayer_reconstruction(y, input_shape)
    else:
        op = np.zeros(input_shape)
    return op
def cfa_reconstruction_1(y: np.ndarray, cfa: str, input_shape: tuple) -> np.ndarray:
    """Performs demosaicking on y using the version 1 reconstruction.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.
        input_shape (tuple): Shape of the output image (H, W, 3).

    Returns:
        np.ndarray: Demosaicked image.
    """
    if cfa == "bayer":
        op = v1.bayer_reconstruction(y, input_shape)
    elif cfa == "quad_bayer":
        op = v1.quad_bayer_reconstruction(y, input_shape)
    else:
        op = np.zeros(input_shape)
    return op
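# Minimal usage sketch of these helpers, following the pattern used by the other methods in this
# merge request (src.utils.load_image and src.forward_model.CFA are assumed from the repository;
# the image path is the sample image used elsewhere in the project).
if __name__ == "__main__":
    from src.utils import load_image

    img = load_image('images/img_4.png')                  # ground-truth RGB image in [0, 1]
    op_cfa = CFA('bayer', img.shape)                       # forward model (mosaicking operator)
    y_acq = op_cfa.direct(img)                             # simulated raw acquisition
    rec = cfa_reconstruction(y_acq, 'bayer', img.shape)    # demosaicked estimate, same shape as img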
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
import src.methods.Samanos_Thomas.function as f
from src.forward_model import CFA
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    input_shape = (y.shape[0], y.shape[1], 3)
    Y = f.cfa_reconstruction(y, cfa, input_shape)
    return Y
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
import numpy as np
from skimage.transform import resize
def upscale(img: np.ndarray, shape: tuple, pas: int) -> np.ndarray:
    """Upscales the acquisition to the shape of the desired reconstruction.

    Args:
        img (np.ndarray): Mosaicked image at the reduced size.
        shape (tuple): Shape of the output.
        pas (int): Downsampling step used to build img (kept for API symmetry, unused here).

    Returns:
        np.ndarray: Upscaled image.
    """
    return resize(img, shape, anti_aliasing=True)
def bayer_reconstruction(y: np.ndarray, input_shape: tuple) -> np.ndarray:
    """Reconstructs a Bayer acquisition by averaging each 2x2 cell, then upscaling."""
    op_shape = (input_shape[0] // 2, input_shape[1] // 2, 3)
    op = np.zeros(op_shape)
    conv = np.zeros((2, 2, 3))
    # (row, col, channel) positions inside the Bayer cell: one red, two greens, one blue.
    liste = [(0, 1, 0), (0, 0, 1), (1, 1, 1), (1, 0, 2)]
    for el in liste:
        conv[el] = 1
        if el[2] == 1:
            conv[el] = 1 / 2  # the two green samples of each cell are averaged
    for i in range(3):
        op[:, :, i] = conv2d(y, conv[:, :, i], 2)
    op1 = upscale(op, (input_shape[0], input_shape[1], 3), 2)
    return op1
def quad_bayer_reconstruction(y: np.ndarray, input_shape: tuple) -> np.ndarray:
    """Reconstructs a Quad Bayer acquisition by averaging each 4x4 cell, then upscaling."""
    op_shape = (input_shape[0] // 4, input_shape[1] // 4, 3)
    op = np.zeros(op_shape)
    conv = np.zeros((4, 4, 3))
    # (row, col, channel) positions inside the Quad Bayer cell: 4 blue, 8 green, 4 red samples.
    liste = [(2, 0, 2), (3, 0, 2), (2, 1, 2), (3, 1, 2),
             (0, 0, 1), (1, 0, 1), (0, 1, 1), (1, 1, 1),
             (2, 2, 1), (2, 3, 1), (3, 2, 1), (3, 3, 1),
             (0, 2, 0), (0, 3, 0), (1, 2, 0), (1, 3, 0)]
    for el in liste:
        conv[el] = 1 / 4
        if el[2] == 1:
            conv[el] = 1 / 8  # eight green samples per cell
    for i in range(3):
        op[:, :, i] = conv2d(y, conv[:, :, i], 4)
    op1 = upscale(op, (input_shape[0], input_shape[1], 3), 4)
    return op1
def conv2d(img: np.ndarray, conv: np.ndarray, pas: int) -> np.ndarray:
    """Strided block reduction: each pas x pas block of img is weighted by conv and summed."""
    y_shape = (img.shape[0], img.shape[1])
    op = np.zeros((y_shape[0] // pas, y_shape[1] // pas))
    for i in range(0, y_shape[0], pas):
        for j in range(0, y_shape[1], pas):
            op[i // pas, j // pas] = np.sum(img[i:i + pas, j:j + pas] * conv)
    return op
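# For reference, a vectorized equivalent of the strided block reduction above; this is only a
# sketch under the assumption that the image height and width are exact multiples of pas (true
# for the project's test images), and it is not used by the reconstruction itself.
def conv2d_vectorized(img: np.ndarray, conv: np.ndarray, pas: int) -> np.ndarray:
    h, w = img.shape[0] // pas, img.shape[1] // pas
    # Split img into non-overlapping pas x pas blocks, weight each block by conv, sum per block.
    blocks = img[:h * pas, :w * pas].reshape(h, pas, w, pas)
    return np.einsum('hiwj,ij->hw', blocks, conv)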
import numpy as np
from scipy.signal import convolve2d
CONV2 = np.array([[1/4,1/2,1/4],[1/2,1,1/2],[1/4,1/2,1/4]])
def bayer_reconstruction(y: np.ndarray, input_shape: tuple) -> np.ndarray:
    """Bilinear reconstruction of a Bayer acquisition: mask each channel, then interpolate."""
    op_shape = (input_shape[0], input_shape[1], 3)
    op = np.zeros(op_shape)
    conv = np.zeros((2, 2, 3))
    liste = [(0, 1, 0), (0, 0, 1), (1, 1, 1), (1, 0, 2)]
    for el in liste:
        conv[el] = 1
    op = masq(y, conv, 2)
    op1 = np.zeros_like(op)
    for color in range(3):
        op1[:, :, color] = convolve2d(op[:, :, color], CONV2, mode="same")
        if color == 1:
            op1[:, :, color] = op1[:, :, color] / 2  # green is sampled twice per cell
    return op1
CONV4 = np.array([[11/156,43/312,2/13,43/312,11/156],
[43/312,1/6,1/4,1/6,43/312],
[2/13,1/4,1/3,1/4,2/13],
[43/312,1/6,1/4,1/6,43/312],
[11/156,43/312,2/13,43/312,11/156]])
def quad_bayer_reconstruction(y: np.ndarray, input_shape: tuple) -> np.ndarray:
    """Bilinear-style reconstruction of a Quad Bayer acquisition using the CONV4 kernel."""
    op_shape = (input_shape[0], input_shape[1], 3)
    op = np.zeros(op_shape)
    conv = np.zeros((4, 4, 3))
    liste = [(2, 0, 2), (3, 0, 2), (2, 1, 2), (3, 1, 2),
             (0, 0, 1), (1, 0, 1), (0, 1, 1), (1, 1, 1),
             (2, 2, 1), (2, 3, 1), (3, 2, 1), (3, 3, 1),
             (0, 2, 0), (0, 3, 0), (1, 2, 0), (1, 3, 0)]
    for el in liste:
        conv[el] = 1
    op = masq(y, conv, 4)
    op1 = np.zeros_like(op)
    for color in range(3):
        op1[:, :, color] = convolve2d(op[:, :, color], CONV4, mode="same")
        if color == 1:
            op1[:, :, color] = op1[:, :, color] / 2  # twice as many green samples
    return op1
def masq(img: np.ndarray, conv: np.ndarray, pas: int) -> np.ndarray:
    """Tiles the pas x pas per-channel pattern conv over the image and applies it as a mask."""
    masq = np.zeros((img.shape[0], img.shape[1], 3))
    po = np.zeros((img.shape[0], img.shape[1], 3))
    for color in range(3):
        for i in range(0, po.shape[0], pas):
            for j in range(0, po.shape[1], pas):
                masq[i:i + pas, j:j + pas, color] = conv[:, :, color]
        po[:, :, color] = masq[:, :, color] * img
    return po
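# A tiny, self-contained illustration (sketch only, not used by the reconstruction above) of the
# idea behind CONV2: convolving a channel that is non-zero only at its CFA sites with the
# bilinear kernel fills the missing positions from their sampled neighbours.
def _bilinear_kernel_demo() -> np.ndarray:
    red_mask = np.zeros((6, 6))
    red_mask[::2, 1::2] = 1     # toy quarter-density pattern, like the red sites of the Bayer cell above
    sampled = 0.8 * red_mask    # a constant red plane observed only at those sites
    filled = convolve2d(sampled, CONV2, mode="same")
    # Away from the borders, filled is ~0.8 everywhere; the denser green plane must additionally
    # be divided by 2, which is exactly what bayer_reconstruction does.
    return filled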
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 12 07:38:51 2024
@author: etudiant
"""
import numpy as np
from scipy.signal import convolve2d
from src.forward_model import CFA
from scipy.ndimage import convolve
def malvar_demosaicing(op: CFA, y: np.ndarray) -> np.ndarray:
    """
    Performs a Malvar (high-quality linear) interpolation.

    Args:
        op (CFA): CFA operator.
        y (np.ndarray): Mosaicked image.

    Returns:
        np.ndarray: Demosaicked image.
    """
    z = op.adjoint(y)

    # Defining masks
    R_m = op.mask[:, :, 0]
    G_m = op.mask[:, :, 1]
    B_m = op.mask[:, :, 2]

    GR_GB = np.array([
        [0.0, 0.0, -1.0, 0.0, 0.0],
        [0.0, 0.0, 2.0, 0.0, 0.0],
        [-1.0, 2.0, 4.0, 2.0, -1.0],
        [0.0, 0.0, 2.0, 0.0, 0.0],
        [0.0, 0.0, -1.0, 0.0, 0.0],
    ]) / 8

    Rg_RB_Bg_BR = np.array([
        [0.0, 0.0, 0.5, 0.0, 0.0],
        [0.0, -1.0, 0.0, -1.0, 0.0],
        [-1.0, 4.0, 5.0, 4.0, -1.0],
        [0.0, -1.0, 0.0, -1.0, 0.0],
        [0.0, 0.0, 0.5, 0.0, 0.0],
    ]) / 8

    Rg_BR_Bg_RB = np.transpose(Rg_RB_Bg_BR)

    Rb_BB_Br_RR = np.array([
        [0.0, 0.0, -1.5, 0.0, 0.0],
        [0.0, 2.0, 0.0, 2.0, 0.0],
        [-1.5, 0.0, 6.0, 0.0, -1.5],
        [0.0, 2.0, 0.0, 2.0, 0.0],
        [0.0, 0.0, -1.5, 0.0, 0.0],
    ]) / 8

    R = y * R_m
    G = y * G_m
    B = y * B_m
    del G_m

    # Green at red and blue locations.
    G = np.where(np.logical_or(R_m == 1, B_m == 1), convolve(y, GR_GB), G)

    RBg_RBBR = convolve(y, Rg_RB_Bg_BR)
    RBg_BRRB = convolve(y, Rg_BR_Bg_RB)
    RBgr_BBRR = convolve(y, Rb_BB_Br_RR)
    del GR_GB, Rg_RB_Bg_BR, Rg_BR_Bg_RB, Rb_BB_Br_RR

    # Red rows.
    R_r = np.transpose(np.any(R_m == 1, axis=1)[None]) * np.ones(R.shape)
    # Red columns.
    R_c = np.any(R_m == 1, axis=0)[None] * np.ones(R.shape)
    # Blue rows.
    B_r = np.transpose(np.any(B_m == 1, axis=1)[None]) * np.ones(B.shape)
    # Blue columns.
    B_c = np.any(B_m == 1, axis=0)[None] * np.ones(B.shape)
    del R_m, B_m

    R = np.where(np.logical_and(R_r == 1, B_c == 1), RBg_RBBR, R)
    R = np.where(np.logical_and(B_r == 1, R_c == 1), RBg_BRRB, R)
    B = np.where(np.logical_and(B_r == 1, R_c == 1), RBg_RBBR, B)
    B = np.where(np.logical_and(R_r == 1, B_c == 1), RBg_BRRB, B)
    R = np.where(np.logical_and(B_r == 1, B_c == 1), RBgr_BBRR, R)
    B = np.where(np.logical_and(R_r == 1, R_c == 1), RBgr_BBRR, B)
    del RBg_RBBR, RBg_BRRB, RBgr_BBRR, R_r, R_c, B_r, B_c

    # Combine channels.
    return np.clip(np.stack((R, G, B), axis=-1), 0, 1)
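# A small numerical illustration (sketch only, not part of the method above): the green-at-red
# kernel GR_GB is symmetric and unit-gain, so on a smooth, locally linear patch it reproduces
# the centre value exactly; its negative taps act as a Laplacian-style correction taken from the
# co-sited channel when the image is not smooth.
def _malvar_green_kernel_demo() -> bool:
    gr_gb = np.array([
        [0.0, 0.0, -1.0, 0.0, 0.0],
        [0.0, 0.0, 2.0, 0.0, 0.0],
        [-1.0, 2.0, 4.0, 2.0, -1.0],
        [0.0, 0.0, 2.0, 0.0, 0.0],
        [0.0, 0.0, -1.0, 0.0, 0.0],
    ]) / 8
    ramp = np.fromfunction(lambda i, j: 0.1 * i + 0.05 * j, (7, 7))  # smooth test patch
    estimate = convolve(ramp, gr_gb)
    return bool(np.isclose(estimate[3, 3], ramp[3, 3]))  # True at the interior centre pixel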
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.Samia_Bouddahab.function import malvar_demosaicing
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    # Performing the reconstruction.
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    res = malvar_demosaicing(op, y)
    return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from scipy.ndimage import generic_filter, convolve
import warnings
from tqdm import tqdm
from src.forward_model import CFA
def regression_filter_func(chans):
    """Computes the linear regression coefficients between channels:
    red and green, blue and green, green and red, green and blue.

    Returns:
        np.ndarray: a_red, b_red, a_blue, b_blue, a_green_red, b_green_red,
        a_green_blue, b_green_blue, where a is the slope and b the intercept.
    """
    red = chans[:, :, 0].flatten()
    green = chans[:, :, 1].flatten()
    blue = chans[:, :, 2].flatten()
    nonzeros_red = np.nonzero(red)
    nonzeros_blue = np.nonzero(blue)
    return np.concatenate((
        np.polyfit(red[nonzeros_red], green[nonzeros_red], deg=1),
        np.polyfit(blue[nonzeros_blue], green[nonzeros_blue], deg=1),
        np.polyfit(green[nonzeros_red], red[nonzeros_red], deg=1),
        np.polyfit(green[nonzeros_blue], blue[nonzeros_blue], deg=1),
    ), axis=0)
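# Reminder (illustrative sketch only): np.polyfit with deg=1 returns (slope, intercept) in that
# order, which is why the concatenated output above is read as pairs (a, b) per channel relation.
def _polyfit_order_demo():
    x = np.array([0.1, 0.2, 0.4, 0.8])
    y_vals = 2.0 * x + 0.05              # a perfectly linear relation
    a, b = np.polyfit(x, y_vals, deg=1)  # a ~= 2.0, b ~= 0.05
    return a, b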
def personal_generic_filter(a, func, footprint, output, msg=""):
    """Applies func on each footprint-sized window of a, writing the result into output."""
    # Suppress the RankWarning raised by np.polyfit on degenerate windows.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', np.RankWarning)
        for i in tqdm(range(a.shape[0]), desc=msg):
            for j in range(a.shape[1]):
                fen = np.ones((footprint.shape[0], footprint.shape[1], a.shape[2]))
                # Clamp the window against the bottom and right borders.
                if i + footprint.shape[0] > a.shape[0]:
                    i_end = a.shape[0]
                    i_start = i_end - footprint.shape[0]
                else:
                    i_start = i
                    i_end = i + footprint.shape[0]
                if j + footprint.shape[1] > a.shape[1]:
                    j_end = a.shape[1]
                    j_start = j_end - footprint.shape[1]
                else:
                    j_start = j
                    j_end = j + footprint.shape[1]
                fen[:i_end - i_start, :j_end - j_start] = a[i_start:i_end, j_start:j_end]
                output[i, j, :] = func(a[i_start:i_end, j_start:j_end])
def combine_directions(bh, bv):
    """Merges the horizontal and vertical estimates, averaging where both are defined."""
    combo = np.zeros(bh.shape)
    combo[:, :, 0] = bh[:, :, 0] + bv[:, :, 0]
    combo[:, :, 0][(bh[:, :, 0] != 0) * (bv[:, :, 0] != 0)] = (bh[:, :, 0][(bh[:, :, 0] != 0) * (bv[:, :, 0] != 0)] + bv[:, :, 0][(bh[:, :, 0] != 0) * (bv[:, :, 0] != 0)]) / 2
    combo[:, :, 0][(bh[:, :, 0] == 0) * (bv[:, :, 0] == 0)] = 0
    combo[:, :, 1] = bh[:, :, 1] / 2 + bv[:, :, 1] / 2
    combo[:, :, 2] = bh[:, :, 2] + bv[:, :, 2]
    combo[:, :, 2][(bh[:, :, 2] != 0) * (bv[:, :, 2] != 0)] = (bh[:, :, 2][(bh[:, :, 2] != 0) * (bv[:, :, 2] != 0)] + bv[:, :, 2][(bh[:, :, 2] != 0) * (bv[:, :, 2] != 0)]) / 2
    combo[:, :, 2][(bh[:, :, 2] == 0) * (bv[:, :, 2] == 0)] = 0
    # Fill the remaining holes in red and blue with the mean of the 4-connected neighbours.
    for i in range(combo.shape[0]):
        for j in range(combo.shape[1]):
            moy = []
            if i != 0:
                moy.append(combo[i - 1, j, :])
            if i != combo.shape[0] - 1:
                moy.append(combo[i + 1, j, :])
            if j != 0:
                moy.append(combo[i, j - 1, :])
            if j != combo.shape[1] - 1:
                moy.append(combo[i, j + 1, :])
            moy = np.stack(moy).mean(axis=0)
            if combo[i, j, 0] == 0:
                combo[i, j, 0] = moy[0]
            if combo[i, j, 2] == 0:
                combo[i, j, 2] = moy[2]
    return combo
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    # Performing the reconstruction.
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    adjoint = op.adjoint(y)

    # Horizontal interpolation
    bh = np.zeros(adjoint.shape)
    for i in range(3):
        for j in range(adjoint.shape[0]):
            non_zero = np.nonzero(adjoint[j, :, i])[0]
            if len(non_zero) == 0:
                continue
            bh[j, :, i] = np.interp(np.arange(adjoint.shape[1]), non_zero, adjoint[j, :, i][non_zero])

    # Vertical interpolation
    bv = np.zeros(adjoint.shape)
    for i in range(3):
        for j in range(adjoint.shape[1]):
            non_zero = np.nonzero(adjoint[:, j, i])[0]
            if len(non_zero) == 0:
                continue
            bv[:, j, i] = np.interp(np.arange(adjoint.shape[0]), non_zero, adjoint[:, j, i][non_zero])

    # Residuals interpolation
    M = 3
    N = 5
    kernel_hor = np.ones((M, N)) / (M * N)
    kernel_ver = np.ones((N, M)) / (M * N)

    # Horizontal regression filtering
    regh = np.zeros((bh.shape[0], bh.shape[1], 8))
    personal_generic_filter(bh, regression_filter_func, kernel_hor, regh, msg="Regression filtering horizontal")
    ch = np.zeros(bh.shape)
    for i in range(regh.shape[-1]):
        regh[:, :, i] = convolve(regh[:, :, i], kernel_hor)
    ch[:, :, 0] = regh[:, :, 0] * bh[:, :, 0] + regh[:, :, 1]
    ch[:, :, 1] = (regh[:, :, 4] * bh[:, :, 1] + regh[:, :, 5] + regh[:, :, 6] * bh[:, :, 1] + regh[:, :, 7]) / 2
    ch[:, :, 2] = regh[:, :, 2] * bh[:, :, 2] + regh[:, :, 3]

    dh = ch - adjoint
    dh[adjoint == 0] = 0
    # Residual interpolation along rows
    for i in range(3):
        for j in range(adjoint.shape[0]):
            non_zero = np.nonzero(dh[j, :, i])[0]
            if len(non_zero) == 0:
                continue
            ch[j, :, i] -= np.interp(np.arange(adjoint.shape[1]), non_zero, dh[j, :, i][non_zero])
    ch[bh == 0] = 0
    bh = ch.copy()

    # Vertical regression filtering
    regv = np.zeros((bv.shape[0], bv.shape[1], 8))
    personal_generic_filter(bv, regression_filter_func, kernel_ver, regv, msg="Regression filtering vertical")
    cv = np.zeros(bv.shape)
    for i in range(regv.shape[-1]):
        regv[:, :, i] = convolve(regv[:, :, i], kernel_ver)
    cv[:, :, 0] = regv[:, :, 0] * bv[:, :, 0] + regv[:, :, 1]
    cv[:, :, 1] = (regv[:, :, 4] * bv[:, :, 1] + regv[:, :, 5] + regv[:, :, 6] * bv[:, :, 1] + regv[:, :, 7]) / 2
    cv[:, :, 2] = regv[:, :, 2] * bv[:, :, 2] + regv[:, :, 3]

    dv = cv - adjoint
    dv[adjoint == 0] = 0
    # Residual interpolation along columns
    for i in range(3):
        for j in range(adjoint.shape[1]):
            non_zero = np.nonzero(dv[:, j, i])[0]
            if len(non_zero) == 0:
                continue
            cv[:, j, i] -= np.interp(np.arange(adjoint.shape[0]), non_zero, dv[:, j, i][non_zero])
    cv[bv == 0] = 0
    bv = cv.copy()

    return combine_directions(bh, bv)
if __name__ == "__main__":
    from src.utils import load_image, save_image, psnr, ssim
    from src.forward_model import CFA

    image_path = 'images/img_4.png'
    img = load_image(image_path)
    cfa_name = 'bayer'  # bayer or quad_bayer
    op = CFA(cfa_name, img.shape)
    y = op.direct(img)
    res = run_reconstruction(y, cfa_name)
    print('PSNR:', psnr(img, res))
    print('SSIM:', ssim(img, res))
    save_image('output/bh.png', res)
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""A file containing a (pretty useless) reconstruction.
It serves as an example of how the project works.
This file should NOT be modified.
"""
import numpy as np
from scipy.signal import convolve2d
from src.forward_model import CFA
def new_interpolation(op: CFA, y: np.ndarray) -> np.ndarray:
    """Performs a simple interpolation of the lost pixels.

    Args:
        op (CFA): CFA operator.
        y (np.ndarray): Mosaicked image.

    Returns:
        np.ndarray: Demosaicked image.
    """
    z = op.adjoint(y)

    if op.cfa == 'bayer':
        demosaicked_red = np.empty_like(z[:, :, 0])
        weights_red = np.array([[0, 1],
                                [0, 0]])
        for i in range(1, z.shape[0] - 1, 2):
            for j in range(1, z.shape[1] - 1, 2):
                demosaicked_red[i - 1:i + 1, j - 1:j + 1] = np.sum(z[i - 1:i + 1, j - 1:j + 1, 0] * weights_red)

        demosaicked_green = np.empty_like(z[:, :, 1])
        weights_green = np.array([[1, 0],
                                  [0, 1]])
        for i in range(1, z.shape[0] - 1, 2):
            for j in range(1, z.shape[1] - 1, 2):
                demosaicked_green[i - 1:i + 1, j - 1:j + 1] = np.sum(z[i - 1:i + 1, j - 1:j + 1, 1] * weights_green) / 2

        weights_blue = np.array([[0, 0],
                                 [1, 0]])
        demosaicked_blue = np.empty_like(z[:, :, 2])
        for i in range(1, z.shape[0] - 1, 2):
            for j in range(1, z.shape[1] - 1, 2):
                demosaicked_blue[i - 1:i + 1, j - 1:j + 1] = np.sum(z[i - 1:i + 1, j - 1:j + 1, 2] * weights_blue)

        demosaicked_image = np.dstack((demosaicked_red, demosaicked_green, demosaicked_blue))

    else:
        demosaicked_red = np.empty_like(z[:, :, 0])
        weights_red = np.array([[0, 0, 1, 1],
                                [0, 0, 1, 1],
                                [0, 0, 0, 0],
                                [0, 0, 0, 0]])
        for i in range(2, z.shape[0] - 2, 4):
            for j in range(2, z.shape[1] - 2, 4):
                demosaicked_red[i - 2:i + 2, j - 2:j + 2] = np.sum(z[i - 2:i + 2, j - 2:j + 2, 0] * weights_red) / 4

        demosaicked_green = np.empty_like(z[:, :, 1])
        weights_green = np.array([[1, 1, 0, 0],
                                  [1, 1, 0, 0],
                                  [0, 0, 1, 1],
                                  [0, 0, 1, 1]])
        for i in range(2, z.shape[0] - 2, 4):
            for j in range(2, z.shape[1] - 2, 4):
                demosaicked_green[i - 2:i + 2, j - 2:j + 2] = np.sum(z[i - 2:i + 2, j - 2:j + 2, 1] * weights_green) / 8

        weights_blue = np.array([[0, 0, 0, 0],
                                 [0, 0, 0, 0],
                                 [1, 1, 0, 0],
                                 [1, 1, 0, 0]])
        demosaicked_blue = np.empty_like(z[:, :, 2])
        for i in range(2, z.shape[0] - 2, 4):
            for j in range(2, z.shape[1] - 2, 4):
                demosaicked_blue[i - 2:i + 2, j - 2:j + 2] = np.sum(z[i - 2:i + 2, j - 2:j + 2, 2] * weights_blue) / 4

        demosaicked_image = np.dstack((demosaicked_red, demosaicked_green, demosaicked_blue))

    return demosaicked_image
def extract_padded(M, size, i, j):
    """Extracts a size x size zero-padded neighbourhood of M centred at (i, j)."""
    N_i, N_j = M.shape
    res = np.zeros((size, size))
    middle_size = int((size - 1) / 2)
    for ii in range(-middle_size, middle_size + 1):
        for jj in range(-middle_size, middle_size + 1):
            if i + ii >= 0 and i + ii < N_i and j + jj >= 0 and j + jj < N_j:
                res[middle_size + ii, middle_size + jj] = M[i + ii, j + jj]
    return res


def varying_kernel_convolution(M, K_list):
    """Convolves M with a kernel chosen per pixel from K_list according to the 4x4 CFA phase."""
    N_i, N_j = M.shape
    res = np.zeros_like(M)
    for i in range(N_i):
        for j in range(N_j):
            res[i, j] = np.sum(extract_padded(M, K_list[4 * (i % 4) + j % 4].shape[0], i, j) * K_list[4 * (i % 4) + j % 4])
    np.clip(res, 0, 1, res)
    return res
K_identity = np.zeros((5, 5))
K_identity[2, 2] = 1
K_red_0 = np.zeros((5, 5))
K_red_0[2, :] = np.array([-3, 13, 0, 0, 2]) / 12
K_red_1 = np.zeros((5, 5))
K_red_1[2, :] = np.array([2, 0, 0, 13, -3]) / 12
K_red_8 = np.zeros((5, 5))
K_red_8[:2, :2] = np.array([[-1, -1], [-1, 9]]) / 6
K_red_9 = np.zeros((5, 5))
K_red_9[:2, 3:] = np.array([[-1, -1], [9, -1]]) / 6
K_red_10 = np.zeros((5, 5))
K_red_10[:, 2] = np.array([-3, 13, 0, 0, 2]) / 12
K_red_12 = np.zeros((5, 5))
K_red_12[3:, :2] = np.array([[-1, 9], [-1, -1]]) / 6
K_red_13 = np.zeros((5, 5))
K_red_13[3:, 3:] = np.array([[9, -1], [-1, -1]]) / 6
K_red_14 = np.zeros((5, 5))
K_red_14[:, 2] = np.array([2, 0, 0, 13, -3]) / 12
K_list_red = [K_red_0, K_red_1, K_identity, K_identity, K_red_0, K_red_1, K_identity, K_identity, K_red_8, K_red_9, K_red_10, K_red_10, K_red_12, K_red_13, K_red_14, K_red_14]
K_green_2 = np.zeros((5, 5))
K_green_2[2, :] = [-3, 13, 0, 0, 2]
K_green_2[:, 2] = [-3, 13, 0, 0, 2]
K_green_2 = K_green_2 / 24
K_green_3 = np.zeros((5, 5))
K_green_3[2, :] = [2, 0, 0, 13, -3]
K_green_3[:, 2] = [-3, 13, 0, 0, 2]
K_green_3 = K_green_3 / 24
K_green_6 = np.zeros((5, 5))
K_green_6[2, :] = [-3, 13, 0, 0, 2]
K_green_6[:, 2] = [2, 0, 0, 13, -3]
K_green_6 = K_green_6 / 24
K_green_7 = np.zeros((5, 5))
K_green_7[2, :] = [2, 0, 0, 13, -3]
K_green_7[:, 2] = [2, 0, 0, 13, -3]
K_green_7 = K_green_7 / 24
K_list_green = [K_identity, K_identity, K_green_2, K_green_3, K_identity, K_identity, K_green_6, K_green_7, K_green_2, K_green_3, K_identity, K_identity, K_green_6, K_green_7, K_identity, K_identity]
K_list_blue = [K_red_10, K_red_10, K_red_8, K_red_9, K_red_14, K_red_14, K_red_12, K_red_13, K_identity, K_identity, K_red_0, K_red_1, K_identity, K_identity, K_red_0, K_red_1]
ker_bayer_red_blue = np.array([[1, 2, 1], [2, 4, 2], [1, 2, 1]]) / 4
ker_bayer_green = np.array([[0, 1, 0], [1, 4, 1], [0, 1, 0]]) / 4
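# The two bilinear kernels above (ker_bayer_red_blue and ker_bayer_green) are defined but not
# used by new_interpolation. As a hedged illustration only (not part of the original method),
# this is how such kernels are typically applied to the adjoint image z = op.adjoint(y), whose
# channels hold the raw samples at their CFA positions and zeros elsewhere.
def bilinear_bayer_sketch(z: np.ndarray) -> np.ndarray:
    """Illustrative only: bilinear demosaicking of a Bayer adjoint image with the kernels above."""
    return np.dstack((
        convolve2d(z[:, :, 0], ker_bayer_red_blue, mode='same'),  # red sites have quarter density
        convolve2d(z[:, :, 1], ker_bayer_green, mode='same'),     # green sites form a checkerboard
        convolve2d(z[:, :, 2], ker_bayer_red_blue, mode='same'),  # blue sites have quarter density
    ))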
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.Yosra_Jelassi.new_reconstruction import new_interpolation
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    # Performing the reconstruction.
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    res = new_interpolation(op, y)
    return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
import cv2 as cv
from skimage.filters import rank
from src.forward_model import CFA
from skimage.morphology import disk
import matplotlib.pyplot as plt
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    # Performing the reconstruction.
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    z = op.adjoint(y)

    if op.cfa == 'quad_bayer':
        # Transform into a Bayer-patterned image
        y = QuadBayer_to_Bayer(y, op, vizualize=False)
        # Demosaick the image
        res = demosaicking_bayer(y)
    if op.cfa == 'bayer':
        res = demosaicking_bayer(y)

    return res
def get_color(mask, i, j):
    """
    Returns the index of the channel that already contains a colour at a given pixel location.

    Args:
        mask (np.ndarray): Mask of the used CFA.
        i (int): Row index of the pixel.
        j (int): Column index of the pixel.

    Returns:
        int: The index of the colour available at the indicated pixel.
    """
    if mask[i, j, 0] == 1:
        return 0
    if mask[i, j, 1] == 1:
        return 1
    if mask[i, j, 2] == 1:
        return 2
    return None
def demosaicking_bayer(y):
    """
    Performs High-Quality Linear Interpolation on a Bayer-patterned image.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.

    Returns:
        np.ndarray: Demosaicked image.
    """
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA("bayer", input_shape)
    res = np.empty(op.input_shape)
    for i in range(2, input_shape[0] - 2):
        for j in range(2, input_shape[1] - 2):
            patch = y[i - 2:i + 3, j - 2:j + 3]
            channel = get_color(op.mask, i, j)
            # If the red value is available
            if channel == 0:
                res[i, j, 0] = y[i, j]
                # Interpolate the green value
                res[i, j, 1] = np.sum(patch * G_at_R) / 8
                # Interpolate the blue value
                res[i, j, 2] = np.sum(patch * B_at_R) / 8
            # If the green value is available
            if channel == 1:
                res[i, j, 1] = y[i, j]
                # Interpolation of the red value
                if get_color(op.mask, i, j + 1) == 0:
                    res[i, j, 0] = np.sum(patch * R_at_G_row) / 8
                else:
                    res[i, j, 0] = np.sum(patch * R_at_G_column) / 8
                # Interpolation of the blue value
                if get_color(op.mask, i, j + 1) == 2:
                    res[i, j, 2] = np.sum(patch * B_at_G_row) / 8
                else:
                    res[i, j, 2] = np.sum(patch * B_at_G_column) / 8
            # If the blue value is available
            if channel == 2:
                res[i, j, 2] = y[i, j]
                # Interpolate the green value
                res[i, j, 1] = np.sum(patch * G_at_B) / 8
                # Interpolate the red value
                res[i, j, 0] = np.sum(patch * R_at_B) / 8
    return res
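# As an aside, the per-pixel loop above can also be expressed with global convolutions followed
# by a mask-based selection. The sketch below is only that reformulation (same kernels; borders
# are handled by padding/wrapping instead of being skipped); it is not the author's implementation.
def demosaicking_bayer_vectorized_sketch(y: np.ndarray) -> np.ndarray:
    from scipy.signal import convolve2d  # local import, used only by this sketch

    op = CFA("bayer", (y.shape[0], y.shape[1], 3))
    r_m, g_m, b_m = op.mask[:, :, 0], op.mask[:, :, 1], op.mask[:, :, 2]

    def conv(kernel):
        return convolve2d(y, np.array(kernel) / 8, mode="same")

    res = np.empty(op.input_shape)
    # Green: keep the samples, interpolate at red and blue sites.
    res[:, :, 1] = np.where(g_m == 1, y, conv(G_at_R))
    # Red: samples at red sites, row/column kernels at green sites, cross kernel at blue sites.
    red_right = np.roll(r_m, -1, axis=1)  # 1 where the right-hand neighbour is a red site
    res[:, :, 0] = np.where(r_m == 1, y,
                   np.where(g_m * red_right == 1, conv(R_at_G_row),
                   np.where(g_m == 1, conv(R_at_G_column), conv(R_at_B))))
    # Blue: symmetric to red.
    blue_right = np.roll(b_m, -1, axis=1)
    res[:, :, 2] = np.where(b_m == 1, y,
                   np.where(g_m * blue_right == 1, conv(B_at_G_row),
                   np.where(g_m == 1, conv(B_at_G_column), conv(B_at_R))))
    return res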
def QuadBayer_to_Bayer(y, op, vizualize=False):
    """
    Applies the swapping method to transform a QuadBayer-patterned image into a Bayer-patterned image.

    Args:
        y (np.ndarray): Mosaicked image to be transformed.
        op (CFA): The CFA object.
        vizualize (bool): Shows the evolution of the mask if True.

    Returns:
        np.ndarray: A Bayer-patterned image.
    """
    input_shape = (y.shape[0], y.shape[1], 3)
    if vizualize:
        fig, axs = plt.subplots(1, 4, figsize=(15, 15))
        axs[0].imshow(op.mask[0:10, 0:10, :])
        axs[0].set_title('Original mask')

    # Step 1: Swap 2 columns every 2 columns
    temp = np.zeros((input_shape[0], 1))
    temp_mask = np.zeros((input_shape[0], 1, 3))
    for col in range(1, input_shape[1] - 1, 4):
        temp = np.copy(y[:, col])
        y[:, col] = np.copy(y[:, col + 1])
        y[:, col + 1] = np.copy(temp)
        if vizualize:
            temp_mask = np.copy(op.mask[:, col, :])
            op.mask[:, col, :] = np.copy(op.mask[:, col + 1, :])
            op.mask[:, col + 1, :] = np.copy(temp_mask)
    if vizualize:
        axs[1].imshow(op.mask[0:10, 0:10, :])
        axs[1].set_title('Mask after first step')

    # Step 2: Swap 2 lines every 2 lines
    temp = np.zeros((1, input_shape[1]))
    temp_mask = np.zeros((1, input_shape[1], 3))
    for line in range(1, input_shape[0] - 1, 4):
        temp = np.copy(y[line, :])
        y[line, :] = np.copy(y[line + 1, :])
        y[line + 1, :] = np.copy(temp)
        if vizualize:
            temp_mask = np.copy(op.mask[line, :, :])
            op.mask[line, :, :] = np.copy(op.mask[line + 1, :, :])
            op.mask[line + 1, :, :] = np.copy(temp_mask)
    if vizualize:
        axs[2].imshow(op.mask[0:10, 0:10, :])
        axs[2].set_title('Mask after second step')

    # Step 3: Swap back some diagonal greens
    temp_mask = np.zeros((1, 1, 3))
    for i in range(0, input_shape[0], 4):
        for j in range(2, input_shape[1], 4):
            temp = y[i, j]
            y[i, j] = y[i + 1, j - 1]
            y[i + 1, j - 1] = temp
            if vizualize:
                temp_mask = op.mask[i, j, :]
                op.mask[i, j, :] = np.copy(op.mask[i + 1, j - 1, :])
                op.mask[i + 1, j - 1, :] = np.copy(temp_mask)
    if vizualize:
        axs[3].imshow(op.mask[0:10, 0:10, :])
        axs[3].set_title('Mask after third step')
        plt.tight_layout()
        plt.show()

    return y
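# To see where the three swap steps send each photosite, the function can be run on an index
# grid (illustrative sketch only; the values 0..63 label the original positions of an 8x8
# acquisition, and the CFA constructor is the one used throughout this file).
def _quadbayer_swap_demo() -> np.ndarray:
    idx = np.arange(64, dtype=float).reshape(8, 8)
    # Each entry of the returned array tells which original pixel now sits at that position.
    return QuadBayer_to_Bayer(idx, CFA("quad_bayer", (8, 8, 3)))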
# Defining the interpolation kernels
# Interpolate the Green
# Green at Red
G_at_R = [[ 0, 0,-1, 0, 0],
[ 0, 0, 2, 0, 0],
[-1, 2, 4, 2,-1],
[ 0, 0, 2, 0, 0],
[ 0, 0,-1, 0, 0]]
# Green at Blue
G_at_B = [[ 0, 0,-1, 0, 0],
[ 0, 0, 2, 0, 0],
[-1, 2, 4, 2,-1],
[ 0, 0, 2, 0, 0],
[ 0, 0,-1, 0, 0]]
#Interpolate the Red
# R at G in R row, B column
R_at_G_row = [[ 0,  0, 0.5,  0,  0],
              [ 0, -1,   0, -1,  0],
              [-1,  4,   5,  4, -1],
              [ 0, -1,   0, -1,  0],
              [ 0,  0, 0.5,  0,  0]]
# R at G in B row, R column
R_at_G_column = [[  0,  0, -1,  0,   0],
                 [  0, -1,  4, -1,   0],
                 [0.5,  0,  5,  0, 0.5],
                 [  0, -1,  4, -1,   0],
                 [  0,  0, -1,  0,   0]]
# Red at Blue
R_at_B = [[0, 0, -1.5, 0, 0],
[0, 2, 0, 2, 0],
[-1.5, 0, 6, 0, -1.5],
[0, 2, 0, 2, 0],
[0, 0, -1.5, 0, 0]]
# Interpolation for Blue
# Blue at Green in B row, R column
B_at_G_row = [[ 0,  0, 0.5,  0,  0],
              [ 0, -1,   0, -1,  0],
              [-1,  4,   5,  4, -1],
              [ 0, -1,   0, -1,  0],
              [ 0,  0, 0.5,  0,  0]]
B_at_G_column = [[  0,  0, -1,  0,   0],
                 [  0, -1,  4, -1,   0],
                 [0.5,  0,  5,  0, 0.5],
                 [  0, -1,  4, -1,   0],
                 [  0,  0, -1,  0,   0]]
B_at_R = [[0, 0, -1.5, 0, 0],
[0, 2, 0, 2, 0],
[-1.5, 0, 6, 0, -1.5],
[0, 2, 0, 2, 0],
[0, 0, -1.5, 0, 0]]
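# Sanity-check sketch (not part of the method): each 5x5 kernel above has taps summing to 8, so
# the filters are unit-gain once the products are divided by 8 inside demosaicking_bayer.
if __name__ == "__main__":
    for name, kernel in [("G_at_R", G_at_R), ("R_at_G_row", R_at_G_row),
                         ("R_at_G_column", R_at_G_column), ("R_at_B", R_at_B)]:
        assert np.isclose(np.sum(np.array(kernel)), 8.0), name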
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""A file containing a (pretty useless) reconstruction.
It serves as an example of how the project works.
This file should NOT be modified.
"""
import numpy as np
from scipy.signal import convolve2d
from src.forward_model import CFA
def High_Quality_Linear_Interpolation(op: CFA, y: np.ndarray) -> np.ndarray:
    """Performs a High-Quality Linear Interpolation of the lost pixels.

    Args:
        op (CFA): CFA operator.
        y (np.ndarray): Mosaicked image.

    Returns:
        np.ndarray: Demosaicked image.
    """
    z = op.adjoint(y)

    # Operation for the Bayer pattern
    if op.cfa == 'bayer':
        res = np.empty(op.input_shape)
        mask = op.mask
        R = 0
        G = 1
        B = 2
        y_padded = np.pad(y, 2, 'constant', constant_values=0)
        for i in range(2, y_padded.shape[0] - 2):
            for j in range(2, y_padded.shape[1] - 2):
                for c in range(op.input_shape[2]):
                    i_real = i - 2
                    j_real = j - 2
                    if (c == R and mask[i_real, j_real, R] == 1) or (c == G and mask[i_real, j_real, G] == 1) or (c == B and mask[i_real, j_real, B] == 1):
                        # The pixel value is already available in this channel.
                        res[i_real, j_real, c] = y_padded[i, j]
                    elif (c == G) and (mask[i_real, j_real, B] == 1 or mask[i_real, j_real, R] == 1):
                        # Green channel at a B or R pixel.
                        res[i_real, j_real, c] = float(convolve2d(y_padded[i - 2:i + 3, j - 2:j + 3], hq_g_r_and_b, mode='valid'))
                    elif (c == R or c == B) and (mask[i_real, j_real, G] == 1) and (mask[i_real, j_real - 1, c] == 0):
                        # Red or blue channel at a G pixel whose left neighbour is not in the recovered layer.
                        res[i_real, j_real, c] = float(convolve2d(y_padded[i - 2:i + 3, j - 2:j + 3], hq_r_and_b_g_brow_rcol, mode='valid'))
                    elif (c == R or c == B) and (mask[i_real, j_real, G] == 1) and (mask[i_real - 1, j_real, c] == 0):
                        # Red or blue channel at a G pixel whose upper neighbour is not in the recovered layer.
                        res[i_real, j_real, c] = float(convolve2d(y_padded[i - 2:i + 3, j - 2:j + 3], hq_r_and_b_g_rrow_bcol, mode='valid'))
                    elif (c == R and mask[i_real, j_real, B] == 1) or (c == B and mask[i_real, j_real, R] == 1):
                        # Red channel at a B pixel, or the other way round.
                        res[i_real, j_real, c] = float(convolve2d(y_padded[i - 2:i + 3, j - 2:j + 3], hq_r_b, mode='valid'))
                    else:
                        print("error")

    # Operation for the Quad Bayer pattern
    else:
        res = np.empty(op.input_shape)
        mask = op.mask
        R = 0
        G = 1
        B = 2
        y_padded = np.pad(y, 4, 'constant', constant_values=0)
        for i in range(4, y_padded.shape[0] - 4, 2):
            for j in range(4, y_padded.shape[1] - 4, 2):
                for c in range(op.input_shape[2]):
                    i_real = i - 4
                    j_real = j - 4
                    patch = y_padded[i - 4:i + 6, j - 4:j + 6]
                    if (c == R and mask[i_real, j_real, R] == 1) or (c == G and mask[i_real, j_real, G] == 1) or (c == B and mask[i_real, j_real, B] == 1):
                        # The 2x2 block is already available in this channel.
                        res[i_real:i_real + 2, j_real:j_real + 2, c] = y_padded[i:i + 2, j:j + 2]
                    elif (c == G) and (mask[i_real, j_real, B] == 1 or mask[i_real, j_real, R] == 1):
                        # Green channel at a B or R block: one kernel per pixel of the 2x2 block.
                        res[i_real, j_real, c] = float(convolve2d(patch, A_hq_quad_g_r_and_b, mode='valid'))
                        res[i_real, j_real + 1, c] = float(convolve2d(patch, B_hq_quad_g_r_and_b, mode='valid'))
                        res[i_real + 1, j_real, c] = float(convolve2d(patch, C_hq_quad_g_r_and_b, mode='valid'))
                        res[i_real + 1, j_real + 1, c] = float(convolve2d(patch, D_hq_quad_g_r_and_b, mode='valid'))
                    elif (c == R or c == B) and (mask[i_real, j_real, G] == 1) and (mask[i_real, j_real - 2, c] == 0):
                        # Red or blue channel at a G block whose left block is not in the recovered layer.
                        res[i_real, j_real, c] = float(convolve2d(patch, A_hq_quad_r_and_b_g_brow_rcol, mode='valid'))
                        res[i_real, j_real + 1, c] = float(convolve2d(patch, B_hq_quad_r_and_b_g_brow_rcol, mode='valid'))
                        res[i_real + 1, j_real, c] = float(convolve2d(patch, C_hq_quad_r_and_b_g_brow_rcol, mode='valid'))
                        res[i_real + 1, j_real + 1, c] = float(convolve2d(patch, D_hq_quad_r_and_b_g_brow_rcol, mode='valid'))
                    elif (c == R or c == B) and (mask[i_real, j_real, G] == 1) and (mask[i_real - 2, j_real, c] == 0):
                        # Red or blue channel at a G block whose upper block is not in the recovered layer.
                        res[i_real, j_real, c] = float(convolve2d(patch, A_hq_quad_r_and_b_g_rrow_bcol, mode='valid'))
                        res[i_real, j_real + 1, c] = float(convolve2d(patch, B_hq_quad_r_and_b_g_rrow_bcol, mode='valid'))
                        res[i_real + 1, j_real, c] = float(convolve2d(patch, C_hq_quad_r_and_b_g_rrow_bcol, mode='valid'))
                        res[i_real + 1, j_real + 1, c] = float(convolve2d(patch, D_hq_quad_r_and_b_g_rrow_bcol, mode='valid'))
                    elif (c == R and mask[i_real, j_real, B] == 1) or (c == B and mask[i_real, j_real, R] == 1):
                        # Red channel at a B block, or the other way round.
                        res[i_real, j_real, c] = float(convolve2d(patch, A_hq_quad_r_b, mode='valid'))
                        res[i_real, j_real + 1, c] = float(convolve2d(patch, B_hq_quad_r_b, mode='valid'))
                        res[i_real + 1, j_real, c] = float(convolve2d(patch, C_hq_quad_r_b, mode='valid'))
                        res[i_real + 1, j_real + 1, c] = float(convolve2d(patch, D_hq_quad_r_b, mode='valid'))
                    else:
                        print("error")

    np.clip(res, 0, 1, res)
    return res
# Bayer masks
hq_g_r_and_b = np.array([[0, 0, -1, 0, 0], [0, 0, 2, 0, 0], [-1, 2, 4, 2, -1], [0, 0, 2, 0, 0], [0, 0, -1, 0, 0]]) / 8
hq_r_and_b_g_rrow_bcol = np.array([[0, 0, 1/2, 0, 0], [0, -1, 0, -1, 0], [-1, 4, 5, 4, -1], [0, -1, 0, -1, 0], [0, 0, 1/2, 0, 0]]) / 8
hq_r_and_b_g_brow_rcol = hq_r_and_b_g_rrow_bcol.T
hq_r_b = np.array([[0, 0, -3/2, 0, 0], [0, 2, 0, 2, 0], [-3/2, 0, 6, 0, -3/2], [0, 2, 0, 2, 0], [0, 0, -3/2, 0, 0]]) / 8
# Quad bayer masks
## Compute the value of the 4 pixels of a block with a single mask; the same value is then applied to all 4 pixels
hq_quad_g_r_and_b = np.array([[0,0, 0,0, -1,-1, 0,0, 0,0],[0,0, 0,0, -1,-1, 0,0, 0,0],
[0,0,0, 0,2, 2, 0,0, 0,0],[0,0,0, 0,2, 2, 0,0, 0,0],
[-1,-1, 2,2, 4,4, 2,2, -1,-1],[-1,-1, 2,2, 4,4, 2,2, -1,-1],
[0,0, 0,0, 2,2, 0,0, 0,0],[0,0, 0,0, 2,2, 0,0, 0,0],
[0,0, 0,0, -1,-1, 0,0, 0,0],[0,0, 0,0, -1,-1, 0,0, 0,0]]) / 16
hq_quad_r_and_b_g_rrow_bcol = np.array([[0,0, 0,0, 1/2,1/2, 0,0, 0,0],[0,0, 0,0, 1/2,1/2, 0,0, 0,0],
[0,0, -1,-1, 0,0, -1,-1, 0,0],[0,0, -1,-1, 0,0, -1,-1, 0,0],
[-1,-1, 4,4, 5,5, 4,4, -1,-1],[-1,-1, 4,4, 5,5, 4,4, -1,-1],
[0,0, -1,-1, 0,0, -1,-1, 0,0],[0,0, -1,-1, 0,0, -1,-1, 0,0],
[0,0, 0,0, 1/2,1/2, 0,0, 0,0],[0,0, 0,0, 1/2,1/2, 0,0, 0,0]]) / 16
hq_quad_r_and_b_g_brow_rcol = hq_quad_r_and_b_g_rrow_bcol.T
hq_quad_r_b = np.array([[0,0, 0,0, -3/2,-3/2, 0,0, 0,0],[0,0, 0,0, -3/2,-3/2, 0,0, 0,0],
[0,0, 2,2, 0,0, 2,2, 0,0],[0,0, 2,2, 0,0, 2,2, 0,0],
[-3/2,-3/2, 0,0, 6,6, 0,0, -3/2,-3/2],[-3/2,-3/2, 0,0, 6,6, 0,0, -3/2,-3/2],
[0,0, 2,2, 0,0, 2,2, 0,0],[0,0, 2,2, 0,0, 2,2, 0,0],
[0,0, 0,0, -3/2,-3/2, 0,0, 0,0],[0,0, 0,0, -3/2,-3/2, 0,0, 0,0]]) / 16
## Compute the value of each pixel of the 2x2 block independently
# A = top-left pixel / B = top-right pixel / C = bottom-left pixel / D = bottom-right pixel
A_hq_quad_g_r_and_b = np.array([[0,0, 0,0, -1,0, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, 2,0, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[-1,0, 2,0, 4,0, 2,0, -1,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, 2,0, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, -1,0, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0]]) / 8
B_hq_quad_g_r_and_b = np.array([[0,0, 0,0, 0,-1, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, 0,2, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,-1, 0,2, 0,4, 0,2, 0,-1],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, 0,2, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, 0,-1, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0]]) / 8
C_hq_quad_g_r_and_b = np.array([[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, -1,0, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 2,0, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[-1,0, 2,0, 4,0, 2,0, -1,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 2,0, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, -1,0, 0,0, 0,0]]) / 8
D_hq_quad_g_r_and_b = np.array([[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 0,-1, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 0,2, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,-1, 0,2, 0,4, 0,2, 0,-1],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 0,2, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 0,-1, 0,0, 0,0]]) / 8
A_hq_quad_r_and_b_g_rrow_bcol = np.array([[0,0, 0,0, 1/2,0, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, -1,0, 0,0, -1,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[-1,0, 4,0, 5,0, 4,0, -1,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, -1,0, 0,0, -1,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, 1/2,0, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0]]) / 8
B_hq_quad_r_and_b_g_rrow_bcol = np.array([[0,0, 0,0, 0,1/2, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,-1, 0,0, 0,-1, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,-1, 0,4, 0,5, 0,4, 0,-1],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,-1, 0,0, 0,-1, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, 0,-1/2, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0]]) / 8
C_hq_quad_r_and_b_g_rrow_bcol = np.array([[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 1/2,0, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, -1,0, 0,0, -1,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[-1,0, 4,0, 5,0, 4,0, -1,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, -1,0, 0,0, -1,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 1/2,0, 0,0, 0,0]]) / 8
D_hq_quad_r_and_b_g_rrow_bcol = np.array([[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 0,1/2, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,-1, 0,0, 0,-1, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,-1, 0,4, 0,5, 0,4, 0,-1],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,-1, 0,0, 0,-1, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 0,-1/2, 0,0, 0,0]]) / 8
A_hq_quad_r_and_b_g_brow_rcol = A_hq_quad_r_and_b_g_rrow_bcol.T
B_hq_quad_r_and_b_g_brow_rcol = B_hq_quad_r_and_b_g_rrow_bcol.T
C_hq_quad_r_and_b_g_brow_rcol = C_hq_quad_r_and_b_g_rrow_bcol.T
D_hq_quad_r_and_b_g_brow_rcol = D_hq_quad_r_and_b_g_rrow_bcol.T
A_hq_quad_r_b = np.array([[0,0, 0,0, -3/2,0, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 2,0, 0,0, 2,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[-3/2,0, 0,0, 6,0, 0,0, -3/2,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 2,0, 0,0, 2,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, -3/2,0, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0]]) / 8
B_hq_quad_r_b = np.array([[0,0, 0,0, 0,-3/2, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,2, 0,0, 0,2, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,-3/2, 0,0, 0,6, 0,0, 0,-3/2],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,2, 0,0, 0,2, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0],
[0,0, 0,0, 0,-3/2, 0,0, 0,0],[0,0, 0,0, 0,0, 0,0, 0,0]]) / 8
C_hq_quad_r_b = np.array([[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, -3/2,0, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 2,0, 0,0, 2,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[-3/2,0, 0,0, 6,0, 0,0, -3/2,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 2,0, 0,0, 2,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, -3/2,0, 0,0, 0,0]]) / 8
D_hq_quad_r_b = np.array([[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 0,-3/2, 0,0, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,2, 0,0, 0,2, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,-3/2, 0,0, 0,6, 0,0, 0,-3/2],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,2, 0,0, 0,2, 0,0],
[0,0, 0,0, 0,0, 0,0, 0,0],[0,0, 0,0, 0,-3/2, 0,0, 0,0]]) / 8
####
####
####
#### #### #### #############
#### ###### #### ##################
#### ######## #### ####################
#### ########## #### #### ########
#### ############ #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ######## #### #### ####
#### #### ## ###### #### #### ######
#### #### #### ## #### #### ############
#### #### ###### #### #### ##########
#### #### ########## #### #### ########
#### #### ######## #### ####
#### #### ############ ####
#### #### ########## ####
#### #### ######## ####
#### #### ###### ####
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in other files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
import src.methods.babacar.demoisaicing_fct as b
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
    """Performs demosaicking on y.

    Args:
        y (np.ndarray): Mosaicked image to be reconstructed.
        cfa (str): Name of the CFA. Can be bayer or quad_bayer.

    Returns:
        np.ndarray: Demosaicked image.
    """
    # Performing the reconstruction.
    input_shape = (y.shape[0], y.shape[1], 3)
    op = CFA(cfa, input_shape)
    res = b.High_Quality_Linear_Interpolation(op, y)
    return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller