Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing 1344 additions and 1 deletion
import numpy as np
from scipy.signal import convolve2d


def down_sample(image):
    """Averages each 2x2 block to turn a Quad Bayer mosaic into a Bayer-sized one."""
    down_sampled = np.empty((image.shape[0] // 2, image.shape[1] // 2))
    down_sampled[:, :] = (image[::2, ::2] + image[1::2, ::2] + image[::2, 1::2] + image[1::2, 1::2]) / 4
    return down_sampled


def up_sample(image):
    """Doubles the spatial resolution by replicating each pixel into a 2x2 block."""
    up_sampled = np.empty((image.shape[0] * 2, image.shape[1] * 2, image.shape[2]))
    up_sampled[::2, ::2, :] = image
    up_sampled[1::2, ::2, :] = image
    up_sampled[::2, 1::2, :] = image
    up_sampled[1::2, 1::2, :] = image
    return up_sampled


def refine(image):
    """Smooths each channel with a small Gaussian-like kernel to soften the replication blocks."""
    res = np.empty((image.shape[0], image.shape[1], 3))
    ker = np.array([[1, 2, 1], [2, 4, 2], [1, 2, 1]]) / 16
    for i in range(3):
        res[:, :, i] = convolve2d(image[:, :, i], ker, mode='same')
    return res
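To illustrate how these helpers are meant to chain together for the quad_bayer case, here is a minimal sketch; the random array and the toy demosaicker are placeholders for a real mosaic and for the project's Bayer method, not part of the submitted code.

import numpy as np

quad_mosaic = np.random.rand(16, 16)         # placeholder for a Quad Bayer mosaic
bayer_mosaic = down_sample(quad_mosaic)      # (8, 8): each 2x2 block averaged

def toy_demosaick(y):
    # stand-in for the real Bayer demosaicker: replicate the mosaic into 3 channels
    return np.repeat(y[:, :, None], 3, axis=2)

rgb_small = toy_demosaick(bayer_mosaic)      # (8, 8, 3)
rgb_full = refine(up_sample(rgb_small))      # back to (16, 16, 3), lightly smoothed
print(rgb_full.shape)                        # (16, 16, 3)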
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.marty.mdwi import MDWI
from src.methods.marty.high_quality_interpolation import high_quality_interpolation
from src.methods.marty.quad_bayer import down_sample, up_sample, refine
# Either MDWI or high_quality_interpolation
method = high_quality_interpolation
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
# Performing the reconstruction.
if (cfa == "bayer") :
res = method(y)
else :
down_sample_image = down_sample(y)
bayer_image = method(down_sample_image)
res = up_sample(bayer_image)
res = refine(res)
return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
File added
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
from src.forward_model import CFA
import numpy as np
from scipy.signal import convolve2d
import cv2
def is_green(z,i, j):
return z[i, j, 1] != 0
def hamilton_adams_interpolation(y, op, z):
height, width = y.shape
green_channel = np.copy(z[:, :, 1])
for i in range(1, height-1):
for j in range(1, width-1):
if not is_green(z,i, j) :
delta_H = abs(z[i, j-1, 1] - z[i, j+1, 1]) + abs(z[i, j-1, 0] - z[i, j+1, 0] + z[i, j-1, 2] - z[i, j+1, 2]) / 2
# print(f"delta_H : {delta_H}")
delta_V = abs(z[i-1, j, 1] - z[i+1, j, 1]) + abs(z[i-1, j, 0] - z[i+1, j, 0] + z[i-1, j, 2] - z[i+1, j, 2]) / 2
if delta_H > delta_V:
green_channel[i, j] = (z[i-1, j, 1] + z[i+1, j, 1]) / 2 + (z[i, j-1, 0] - z[i, j+1, 0] + z[i, j-1, 2] - z[i, j+1, 2]) / 4
elif delta_H < delta_V:
green_channel[i, j] = (z[i, j-1, 1] + z[i, j+1, 1]) / 2 + (z[i-1, j, 0] - z[i+1, j, 0] + z[i-1, j, 2] - z[i+1, j, 2]) / 4
else:
green_channel[i, j] = (z[i-1, j, 1] + z[i+1, j, 1] + z[i, j-1, 1] + z[i, j+1, 1]) / 4 + \
(z[i, j-1, 0] - z[i, j+1, 0] + z[i, j-1, 2] - z[i, j+1, 2] + \
z[i-1, j, 0] - z[i+1, j, 0] + z[i-1, j, 2] - z[i+1, j, 2]) / 8
return green_channel
def interpolate_channel_difference(mosaicked_channel, green_channel_interpolated):
ker_bayer_red_blue = np.array([[1, 2, 1], [2, 4, 2], [1, 2, 1]]) / 4
print(mosaicked_channel.shape, green_channel_interpolated.shape)
difference = mosaicked_channel - green_channel_interpolated
difference_interpolated = convolve2d(difference, np.ones((3, 3)) / 9, mode='same', boundary='wrap')
channel_interpolated = green_channel_interpolated + difference_interpolated
channel_interpolated = convolve2d(channel_interpolated, ker_bayer_red_blue, mode='same')
return channel_interpolated
def Constant_difference_based_interpolation_reconstruction(op, y, z):
if op.cfa == 'bayer':
print("bayer")
red_channel = z[:, :, 0]
green_channel = z[:, :, 1]
blue_channel = z[:, :, 2]
green_channel_reconstruct = hamilton_adams_interpolation(y, op, z)
red_channel_interpolated = interpolate_channel_difference(red_channel, green_channel_reconstruct)
blue_channel_interpolated = interpolate_channel_difference(blue_channel, green_channel_reconstruct)
reconstructed_image = np.stack((red_channel_interpolated, green_channel_reconstruct, blue_channel_interpolated), axis=-1)
return reconstructed_image
elif op.cfa == "quad_bayer":
print(f"quad_bayer")
new_z = cv2.resize(z, (z.shape[1] // 2, z.shape[0] // 2), interpolation=cv2.INTER_AREA)
new_y=np.sum(new_z, axis=2)
op.mask = op.mask[::2, ::2]
green_channel_reconstruct_new = hamilton_adams_interpolation(new_y, op, new_z)
red_channel_new = new_z[:, :, 0]
blue_channel_new = new_z[:, :, 2]
red_channel_interpolated_new = interpolate_channel_difference(red_channel_new, green_channel_reconstruct_new)
blue_channel_interpolated_new = interpolate_channel_difference(blue_channel_new, green_channel_reconstruct_new)
reconstructed_image_new = np.stack((red_channel_interpolated_new, green_channel_reconstruct_new, blue_channel_interpolated_new), axis=-1)
reconstructed_image_upsampled = cv2.resize(reconstructed_image_new, (z.shape[1], z.shape[0]), interpolation=cv2.INTER_LINEAR)
return reconstructed_image_upsampled
else :
raise ValueError("CFA pattern not recognized")
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
z = op.adjoint(y)
reconstructed_image = Constant_difference_based_interpolation_reconstruction(op, y, z)
return reconstructed_image
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
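The pipeline above interpolates green with a Hamilton-Adams style gradient test, then fills red and blue by smoothing the colour differences R - G and B - G, which relies on the assumption that those differences vary slowly. Here is a tiny numeric sketch of that assumption, separate from the code above; the ramp image and the red-site mask are invented purely for illustration.

import numpy as np
from scipy.signal import convolve2d

green = np.tile(np.linspace(0.2, 0.8, 8), (8, 1))    # smooth, fully known green plane
red_true = green + 0.1                                # constant R - G offset
mask_r = np.zeros((8, 8))
mask_r[::2, 1::2] = 1                                 # hypothetical red sampling sites
diff = (red_true - green) * mask_r                    # sparse R - G samples
weights = convolve2d(mask_r, np.ones((3, 3)), mode='same', boundary='wrap')
diff_full = convolve2d(diff, np.ones((3, 3)), mode='same', boundary='wrap') / weights
red_est = green + diff_full                           # interpolate the difference, then add green back
print(np.abs(red_est - red_true).max())               # ~0: the constant offset is recovered exactly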
@@ -5,7 +5,7 @@ Students can call their functions (declared in other files of src/methods/your_name).
import numpy as np
from src.methods.template.mourasa_reconstruct import *
from src.methods.mouras_aubin.mourasa_reconstruct import *
from src.forward_model import CFA
"""A file containing the main function for the chosen interpolation (Adams-Hamilton Algorithm).
"""
### BE CAREFUL TO ADD THE MODULE TORCH TO THE FILE requirements.txt
import numpy as np
from scipy.signal import convolve2d
import torch
import torch.nn.functional as F
from src.forward_model import CFA
# Initial kernels for the bilinear interpolation
ker_bayer_red_blue = np.array([[1, 2, 1],
[2, 4, 2],
[1, 2, 1]]) / 4
ker_bayer_green = np.array([[0, 1, 0],
[1, 4, 1],
[0, 1, 0]]) / 4
# Kernels for the quad_bayer filter
ker_quad_red_blue = np.array([[1, 1, 2, 2, 0, 1],
[1, 1, 2, 2, 1, 1],
[2, 2, 4, 4, 2, 2],
[2, 2, 4, 4, 2, 2],
[1, 1, 2, 2, 1, 1],
[1, 1, 2, 2, 1, 1]])/16
ker_quad_green = np.array([[0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 0, 0],
[1, 1, 4, 4, 1, 1],
[1, 1, 4, 4, 1, 1],
[0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 0, 0]])/16
def second_naive_interpolation(op: CFA, y: np.ndarray) -> np.ndarray:
"""Performs a naive interpolation of the lost pixels. for the quad_bayer, performs a convolution
with kernels (above: ker_quad_red_blue and ker_quad_green) inspired from the classic bilnear interpolation
Args:
op (CFA): CFA operator.
y (np.ndarray): Mosaicked image.
Returns:
np.ndarray: Demosaicked image.
"""
z = op.adjoint(y)
if op.cfa == 'bayer':
res = np.empty(op.input_shape)
res[:, :, 0] = convolve2d(z[:, :, 0], ker_bayer_red_blue, mode='same')
res[:, :, 1] = convolve2d(z[:, :, 1], ker_bayer_green, mode='same')
res[:, :, 2] = convolve2d(z[:, :, 2], ker_bayer_red_blue, mode='same')
else:
res = np.empty(op.input_shape)
res[:, :, 0] = convolve2d(z[:, :, 0], ker_quad_red_blue, mode='same')
res[:, :, 1] = convolve2d(z[:, :, 1], ker_quad_green, mode='same')
res[:, :, 2] = convolve2d(z[:, :, 2], ker_quad_red_blue, mode='same')
#res[:, :, 0] = convolution_pad_stride(z[:, :, 0], ker_quad_red_blue)
#res[:, :, 1] = convolution_pad_stride(z[:, :, 1], ker_quad_green)
#res[:, :, 2] = convolution_pad_stride(z[:, :, 2], ker_quad_red_blue)
return res
import torch
import torch.nn.functional as F
def convolution_pad_stride(input, kernel):
"""An attempt of convolution with stride and padding.
Args:
input(np.ndarray): input (mosaicked) image.
kernel (np.ndarray): convolution kernel.
Returns:
np.ndarray: Convolution of the image with the kernel with stride of 2 and padding of 512.
"""
input_tensor = torch.tensor(np.expand_dims(input, axis=(0,1)))
kernel_tensor = torch.tensor(np.expand_dims(kernel, axis=(0,1)))
output = F.conv2d(input_tensor, kernel_tensor, stride=2, padding=514)
output = output.numpy().squeeze()
return output
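# Side note (an illustrative observation, not part of the submitted method): up to a
# single entry of ker_quad_red_blue that looks like a typo, the quad kernels above are
# the Bayer kernels with each coefficient expanded into a 2x2 block, which np.kron
# reproduces:
ker_quad_red_blue_from_bayer = np.kron(ker_bayer_red_blue, np.ones((2, 2))) / 4
ker_quad_green_from_bayer = np.kron(ker_bayer_green, np.ones((2, 2))) / 4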
import numpy as np


def take(array2d, i, j):
    """
    Helper function for safe indexing: returns the array value at (i, j),
    or 0 when the index falls outside the array.

    Args:
        array2d (np.ndarray): input array
        i: row index
        j: column index

    Returns:
        np.float64: array value at position [i, j], or 0 if out of bounds
    """
    if 0 <= i < array2d.shape[0] and 0 <= j < array2d.shape[1]:
        return array2d[i, j]
    return np.float64(0)


def red_blue_positions(img):
    """
    Helper generator that yields the red and blue (non-green) positions in the mosaicked image.

    Args:
        img (np.ndarray): mosaicked image

    Yields:
        tuple: (i, j) indices of the non-green pixels, e.g. (0, 1), (0, 3), (1, 0), (1, 2), ... for a 4x4 image
    """
    first_non_green = 1
    for i in range(img.shape[0]):
        for j in range(first_non_green, img.shape[1], 2):
            yield i, j
        first_non_green = 1 - first_non_green


def directional_green_interpolation(img):
    """
    Performs green interpolation in the horizontal and vertical directions.

    Args:
        img (np.ndarray): input (mosaicked) image

    Returns:
        green_h, green_v (np.ndarray): horizontally and vertically interpolated green components
    """
    green_h = img.copy()  # green positions are copied
    green_v = img.copy()  # other values will be replaced
    for i, j in red_blue_positions(img):
        r = lambda k: take(img, i, j + k)  # r - relative indexing
        green_h[i, j] = (r(1) + r(-1) + r(0)) / 2 - (r(2) + r(-2)) / 4
        r = lambda k: take(img, i + k, j)
        green_v[i, j] = (r(1) + r(-1) + r(0)) / 2 - (r(2) + r(-2)) / 4
    return green_h, green_v
def green_decision(img, green_h, green_v, cardinal_directions_improvement=True):
    """
    Chooses between the horizontal and vertical green estimates based on the colour-difference
    uniformity, measured by horizontal and vertical gradients.

    Args:
        img (np.ndarray): input (mosaicked) image
        green_h (np.ndarray): horizontally interpolated green component
        green_v (np.ndarray): vertically interpolated green component
        cardinal_directions_improvement (bool): if True (default), increases the weight of the
            central row and column of the window used to accumulate the gradients

    Returns:
        green (np.ndarray): interpolated green image
        delta_h (np.ndarray): horizontal gradient image
        delta_v (np.ndarray): vertical gradient image
    """
    height, width = img.shape
    # "chrominance" is R - G in red locations, B - G in blue locations
    # and 0 in green locations
    chrominance_h = img - green_h
    chrominance_v = img - green_v
    # also 0 in green locations, this will be useful
    gradient_h = chrominance_h.copy()
    gradient_v = chrominance_v.copy()
    for i, j in red_blue_positions(img):
        gradient_h[i, j] -= take(chrominance_h, i, j + 2)
        gradient_v[i, j] -= take(chrominance_v, i + 2, j)
    gradient_h = np.abs(gradient_h)
    gradient_v = np.abs(gradient_v)

    # could be easily rewritten without loops
    window = np.ones(shape=(5, 5), dtype=np.float64)
    if cardinal_directions_improvement:
        window[2, :] = 3
        window[:, 2] = 3

    delta_h = np.zeros(shape=(img.shape), dtype=np.float64)
    delta_v = delta_h.copy()
    padded_grad_h = np.zeros(shape=(img.shape[0] + 4, img.shape[1] + 4), dtype=np.float64)
    padded_grad_v = padded_grad_h.copy()
    padded_grad_h[2 : img.shape[0] + 2, 2 : img.shape[1] + 2] = gradient_h
    padded_grad_v[2 : img.shape[0] + 2, 2 : img.shape[1] + 2] = gradient_v
    green = green_h.copy()
    for i, j in red_blue_positions(img):
        delta_h[i, j] = np.sum(window * padded_grad_h[i : i + 5, j : j + 5])
        delta_v[i, j] = np.sum(window * padded_grad_v[i : i + 5, j : j + 5])
        if delta_v[i, j] < delta_h[i, j]:
            green[i, j] = green_v[i, j]
    return green, delta_h, delta_v
def red_blue_interpolation(img, green, delta_h, delta_v):
    """
    Performs the red and blue component interpolation.

    Args:
        img (np.ndarray): input (mosaicked) image
        green (np.ndarray): interpolated green image
        delta_h (np.ndarray): horizontal gradient image
        delta_v (np.ndarray): vertical gradient image

    Returns:
        red, blue (np.ndarray): interpolated red and blue image components
    """
    height, width = img.shape
    red = img.copy()
    blue = img.copy()

    # green positions first
    for i in range(0, height, 2):  # green-red rows
        for j in range(0, width, 2):
            red[i, j] = (take(img, i, j - 1) +
                         take(img, i, j + 1)) / 2
            blue[i, j] = (take(img, i - 1, j) +
                          take(img, i + 1, j)) / 2
    for i in range(1, height, 2):  # green-blue rows
        for j in range(1, width, 2):
            blue[i, j] = (take(img, i, j - 1) +
                          take(img, i, j + 1)) / 2
            red[i, j] = (take(img, i - 1, j) +
                         take(img, i + 1, j)) / 2

    # now red in blue positions, blue in red positions
    red_minus_blue = red - blue
    for i in range(1, height, 2):
        for j in range(0, width, 2):
            if delta_v[i, j] < delta_h[i, j]:
                red[i, j] = blue[i, j] + (take(red_minus_blue, i - 1, j) +
                                          take(red_minus_blue, i + 1, j)) / 2
            else:
                red[i, j] = blue[i, j] + (take(red_minus_blue, i, j - 1) +
                                          take(red_minus_blue, i, j + 1)) / 2
    for i in range(0, height, 2):
        for j in range(1, width, 2):
            if delta_v[i, j] < delta_h[i, j]:
                blue[i, j] = red[i, j] - (take(red_minus_blue, i - 1, j) +
                                          take(red_minus_blue, i + 1, j)) / 2
            else:
                blue[i, j] = red[i, j] - (take(red_minus_blue, i, j - 1) +
                                          take(red_minus_blue, i, j + 1)) / 2
    return red, blue


def demosaicking_algorithm(img):
    """
    Main function of the Daniele Menon demosaicking algorithm.

    Args:
        img (np.ndarray): input (mosaicked) image

    Returns:
        np.ndarray: reconstructed image
    """
    green_h, green_v = directional_green_interpolation(img)
    green, delta_h, delta_v = green_decision(img, green_h, green_v)
    red, blue = red_blue_interpolation(img, green, delta_h, delta_v)
    return np.clip(np.dstack((red, green, blue)), 0, 1)
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
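A minimal usage sketch for the Menon-style pipeline above; the 8x8 ramp image, the GRBG mask and the mosaicking step are assumptions made for illustration, not part of the submitted files.

import numpy as np

# Hypothetical GRBG Bayer mosaic of a smooth ramp, matching the layout the
# functions above assume (green at (even, even) and (odd, odd) sites).
rgb = np.dstack([np.tile(np.linspace(0.1, 0.9, 8), (8, 1))] * 3)
mask = np.zeros((8, 8, 3))
mask[::2, ::2, 1] = 1    # green on green-red rows
mask[1::2, 1::2, 1] = 1  # green on green-blue rows
mask[::2, 1::2, 0] = 1   # red sites
mask[1::2, ::2, 2] = 1   # blue sites
mosaic = np.sum(rgb * mask, axis=2)

estimate = demosaicking_algorithm(mosaic)
print(estimate.shape)    # (8, 8, 3)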
File added
Source diff could not be displayed: it is too large.
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.nada_kouddane.functions import second_naive_interpolation, demosaicking_algorithm
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
res = demosaicking_algorithm(y)
return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
File added
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.quelletl.some_function import interpolation
import cv2
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
from scipy.signal import convolve2d
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
res = interpolation(y, op)
return res
def quad_to_bayer(y):
for i in range(1, y.shape[0], 4):
save = np.copy(y[:,i])
y[:,i] = y[:,i+1]
y[:,i+1] = save
for j in range(1, y.shape[0], 4):
save = np.copy(y[j,:])
y[j,:] = y[j+1,:]
y[j+1,:] = save
for i in range(1, y.shape[0], 4):
for j in range(1, y.shape[0], 4):
save = np.copy(y[i,j])
y[i,j] = y[i+1,j+1]
y[i+1,j+1] = save
return y
ker_bayer_green = np.array([[0, 1, 0], [1, 4, 1], [0, 1, 0]]) / 4
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
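To make the pixel shuffling in quad_to_bayer above concrete, here is a small illustrative check (a sketch assuming quad_to_bayer is in scope, with integer labels 0 = G, 1 = R, 2 = B standing in for pixel values): after the column, row and diagonal swaps, every 4x4 Quad Bayer block lands on a standard G R / B G Bayer tiling.

import numpy as np

quad_labels = np.tile(np.array([[0, 0, 1, 1],
                                [0, 0, 1, 1],
                                [2, 2, 0, 0],
                                [2, 2, 0, 0]], dtype=float), (2, 2))
print(quad_to_bayer(quad_labels.copy()))
# rows alternate [0 1 0 1 ...] and [2 0 2 0 ...], i.e. a GRBG Bayer layout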
import numpy as np


def bayer_blue_red(res, y, i, j):
    """
    Compute the estimated blue/red pixel at a red/blue Bayer pixel.

    Args:
        res : estimated image
        y : image to reconstruct
        i, j : indices
    Returns:
        value : value of the estimated pixel
    """
    K2 = res[i-1, j-1, 1] - y[i-1, j-1]
    K4 = res[i-1, j+1, 1] - y[i-1, j+1]
    K10 = res[i+1, j-1, 1] - y[i+1, j-1]
    K12 = res[i+1, j+1, 1] - y[i+1, j+1]
    value = res[i, j, 1] - 1/4 * (K2 + K4 + K10 + K12)
    return value


def bayer_green_vert(res, y, i, j):
    """
    Compute the estimated blue/red pixel at a green Bayer pixel in the vertical direction.

    Args:
        res : estimated image
        y : image to reconstruct
        i, j : indices
    Returns:
        value : value of the estimated pixel
    """
    k1 = res[i-1, j, 1] - y[i-1, j]
    k2 = res[i+1, j, 1] - y[i+1, j]
    value = y[i, j] - 1/2 * (k1 + k2)
    return value


def bayer_green_hor(res, y, i, j):
    """
    Compute the estimated blue/red pixel at a green Bayer pixel in the horizontal direction.

    Args:
        res : estimated image
        y : image to reconstruct
        i, j : indices
    Returns:
        value : value of the estimated pixel
    """
    k1 = res[i, j-1, 1] - y[i, j-1]
    k2 = res[i, j+1, 1] - y[i, j+1]
    value = y[i, j] - 1/2 * (k1 + k2)
    return value


def interpolate_green(res, y, z):
    """
    Directional interpolation of the green channel.

    Args:
        res : estimated image
        y : image to reconstruct
        z : Bayer pattern
    Returns:
        res : image with the green channel reconstructed
    """
    for i in range(2, y.shape[0] - 1):
        for j in range(2, y.shape[1] - 1):
            # Vertical and horizontal gradients
            if z[i, j, 1] == 0:
                d_h = np.abs(y[i, j-1] - y[i, j+1])
                d_v = np.abs(y[i-1, j] - y[i+1, j])
                if d_h > d_v:
                    green = (y[i-1, j] + y[i+1, j]) / 2
                elif d_v > d_h:
                    green = (y[i, j-1] + y[i, j+1]) / 2
                else:
                    green = (y[i, j-1] + y[i, j+1] + y[i-1, j] + y[i+1, j]) / 4
            else:
                green = y[i, j]
            res[i, j, 1] = green
    return res


def quad_to_bayer(y):
    """
    Convert a Quad Bayer mosaic to a Bayer mosaic by swapping columns, rows and
    diagonal pixels inside every 4x4 block.

    Args:
        y : Quad Bayer mosaic (or mask)
    Returns:
        y : rearranged, Bayer-like mosaic
    """
    for i in range(1, y.shape[0], 4):
        save = np.copy(y[:, i])
        y[:, i] = y[:, i+1]
        y[:, i+1] = save
    for j in range(1, y.shape[0], 4):
        save = np.copy(y[j, :])
        y[j, :] = y[j+1, :]
        y[j+1, :] = save
    for i in range(1, y.shape[0], 4):
        for j in range(1, y.shape[0], 4):
            save = np.copy(y[i, j])
            y[i, j] = y[i+1, j+1]
            y[i+1, j+1] = save
    return y


def interpolation(y, op):
    """
    Reconstruct the image.

    Args:
        y : image to reconstruct
        op : CFA operator
    Returns:
        np.ndarray: Demosaicked image.
    """
    if op.cfa == 'quad_bayer':
        y = quad_to_bayer(y)
        op.mask = quad_to_bayer(op.mask)

    z = op.adjoint(y)
    res = np.empty(op.input_shape)

    # Interpolation of the green channel
    res = interpolate_green(res, y, z)

    # Interpolation of the R and B channels using channel correlation
    for i in range(2, y.shape[0] - 2):
        for j in range(2, y.shape[1] - 2):
            # Bayer pixel is green
            if z[i, j, 1] != 0:
                # Green is between 2 vertical blue pixels
                if z[i+1, j, 0] == 0:
                    red = bayer_green_hor(res, y, i, j)    # Compute red channel
                    blue = bayer_green_vert(res, y, i, j)  # Compute blue channel
                # Green is between 2 vertical red pixels
                else:
                    blue = bayer_green_hor(res, y, i, j)   # Compute blue channel
                    red = bayer_green_vert(res, y, i, j)   # Compute red channel
            # Bayer pixel is red
            elif z[i, j, 0] != 0:
                red = y[i, j]                              # Red channel
                blue = bayer_blue_red(res, y, i, j)        # Blue channel
            # Bayer pixel is blue
            elif z[i, j, 2] != 0:
                blue = y[i, j]                             # Blue channel
                red = bayer_blue_red(res, y, i, j)         # Red channel
            res[i, j, 0] = np.clip(red, 0, 255)
            res[i, j, 2] = np.clip(blue, 0, 255)
    return res
import numpy as np
from src.forward_model import CFA
import cv2 as cv2


def hamilton_adams(y, input_shape):
    """Hamilton-Adams style demosaicking: gradient-guided green interpolation, then bilinear red/blue."""
    n, p = input_shape[0], input_shape[1]
    z = np.copy(y)
    for i in range(2, n-2):  # green interpolation by gradient comparison for every red and blue pixel
        for j in range(2, p-2):
            if z[i, j, 1] == 0 and z[i, j, 0] != 0:  # red pixel
                # Vertical and horizontal gradients
                grad_y = np.abs(z[i-1, j, 1] - z[i+1, j, 1]) + np.abs(2*z[i, j, 0] - z[i-2, j, 0] - z[i+2, j, 0])
                grad_x = np.abs(z[i, j-1, 1] - z[i, j+1, 1]) + np.abs(2*z[i, j, 0] - z[i, j-2, 0] - z[i, j+2, 0])
                if grad_x < grad_y:
                    z[i, j, 1] = (z[i, j-1, 1] + z[i, j+1, 1])/2 + (2*z[i, j, 0] - z[i, j-2, 0] - z[i, j+2, 0])/4
                elif grad_x > grad_y:
                    z[i, j, 1] = (z[i-1, j, 1] + z[i+1, j, 1])/2 + (2*z[i, j, 0] - z[i-2, j, 0] - z[i+2, j, 0])/4
                else:
                    z[i, j, 1] = (z[i-1, j, 1] + z[i+1, j, 1] + z[i, j-1, 1] + z[i, j+1, 1])/4 + (2*z[i, j, 0] - z[i, j-2, 0] - z[i, j+2, 0] + 2*z[i, j, 0] - z[i-2, j, 0] - z[i+2, j, 0])/8
            elif z[i, j, 1] == 0 and z[i, j, 2] != 0:  # blue pixel
                # Vertical and horizontal gradients
                grad_y = np.abs(z[i-1, j, 1] - z[i+1, j, 1]) + np.abs(2*z[i, j, 2] - z[i-2, j, 2] - z[i+2, j, 2])
                grad_x = np.abs(z[i, j-1, 1] - z[i, j+1, 1]) + np.abs(2*z[i, j, 2] - z[i, j-2, 2] - z[i, j+2, 2])
                if grad_x < grad_y:
                    z[i, j, 1] = (z[i, j-1, 1] + z[i, j+1, 1])/2 + (2*z[i, j, 2] - z[i, j-2, 2] - z[i, j+2, 2])/4
                elif grad_x > grad_y:
                    z[i, j, 1] = (z[i-1, j, 1] + z[i+1, j, 1])/2 + (2*z[i, j, 2] - z[i-2, j, 2] - z[i+2, j, 2])/4
                else:
                    z[i, j, 1] = (z[i-1, j, 1] + z[i+1, j, 1] + z[i, j-1, 1] + z[i, j+1, 1])/4 + (2*z[i, j, 2] - z[i, j-2, 2] - z[i, j+2, 2] + 2*z[i, j, 2] - z[i-2, j, 2] - z[i+2, j, 2])/8
    for i in range(1, n-1):  # red/blue interpolation by bilinear interpolation on blue/red pixels
        for j in range(1, p-1):
            if z[i, j, 2] != 0:
                z[i, j, 0] = (z[i-1, j-1, 0] + z[i-1, j+1, 0] + z[i+1, j-1, 0] + z[i+1, j+1, 0]) / 4
            elif z[i, j, 0] != 0:
                z[i, j, 2] = (z[i-1, j-1, 2] + z[i-1, j+1, 2] + z[i+1, j-1, 2] + z[i+1, j+1, 2]) / 4
            else:
                z[i, j] = z[i, j]  # green pixel: handled in the next pass
    for i in range(1, n-1):  # blue and red interpolation by bilinear interpolation on green pixels
        for j in range(1, p-1):
            if z[i, j, 0] == z[i, j, 2]:
                z[i, j, 0] = (z[i-1, j, 0] + z[i, j-1, 0] + z[i+1, j, 0] + z[i, j+1, 0]) / 4
                z[i, j, 2] = (z[i-1, j, 2] + z[i, j-1, 2] + z[i+1, j, 2] + z[i, j+1, 2]) / 4
    return z
# def SSD(y, cfa_img, input_shape):  # SSD algorithm
#     n, p = input_shape[0], input_shape[1]
#     hlist = [16, 4, 1]
#     res = np.copy(y)
#     for h in hlist:
#         res = NLh(res, cfa_img, n, p, h)
#     res = CR(res, n, p)
#     return res

# def NLh(y, cfa_img, n, p, h):  # NLh part
#     res = np.copy(cfa_img)
#     for i in range(4, n-3):
#         for j in range(4, p-3):
#             if cfa_img[i, j, 0] != 0:
#                 res[i, j, 1] = NLh_calc(y, cfa_img, i, j, 1, h)
#                 res[i, j, 2] = NLh_calc(y, cfa_img, i, j, 2, h)
#                 print((i, j))
#             elif cfa_img[i, j, 1] != 0:
#                 res[i, j, 0] = NLh_calc(y, cfa_img, i, j, 0, h)
#                 res[i, j, 2] = NLh_calc(y, cfa_img, i, j, 2, h)
#                 print((i, j))
#             else:
#                 res[i, j, 0] = NLh_calc(y, cfa_img, i, j, 0, h)
#                 res[i, j, 1] = NLh_calc(y, cfa_img, i, j, 1, h)
#                 print((i, j))
#     return res

# def NLh_calc(y, cfa_img, i, j, channel, h):  # aux function to compute the main sum for each pixel
#     sum = 0
#     norm = 0
#     for k in range(-2, 3):
#         for l in range(-2, 3):
#             if k != 0 and j != 0:
#                 a = poids(y, channel, i, j, k, l, h)
#                 sum += a * cfa_img[i+k, j+l, channel]
#                 norm += a
#     return sum / norm

# def poids(y, channel, i, j, k, l, h):  # aux function to calculate the weight for a given p=(i,j), q=(k,l)
#     som = 0
#     # for tx in range(-1, 2):
#     #     for ty in range(-1, 2):
#     som += (np.abs(y[i-1, j-1, channel] - y[k+1, l-1, channel]))**2
#     som += (np.abs(y[i-1, j, channel] - y[k+1, l, channel]))**2
#     som += (np.abs(y[i-1, j+1, channel] - y[k+1, l+1, channel]))**2
#     som += (np.abs(y[i, j-1, channel] - y[k, l-1, channel]))**2
#     som += (np.abs(y[i, j, channel] - y[k, l, channel]))**2
#     som += (np.abs(y[i, j+1, channel] - y[k, l+1, channel]))**2
#     som += (np.abs(y[i+1, j-1, channel] - y[k+1, l-1, channel]))**2
#     som += (np.abs(y[i+1, j, channel] - y[k+1, l, channel]))**2
#     som += (np.abs(y[i+1, j+1, channel] - y[k+1, l+1, channel]))**2
#     res = np.exp((-1/h**2) * som)
#     return res

# def CR(img_rgb):  # Chrominance median
#     res = cv2.cvtColor(img_rgb, cv2.COLOR_RGB2YUV)
#     y, u, v = cv2.split(res)
#     U_med = cv2.medianBlur(u, 3)
#     V_med = cv2.medianBlur(v, 3)
#     y = cv2.cvtColor(y, cv2.COLOR_GRAY2RGB)
#     u = cv2.cvtColor(u, cv2.COLOR_GRAY2RGB)
#     v = cv2.cvtColor(v, cv2.COLOR_GRAY2RGB)
#     res = np.vstack([y, u, v])
#     return res
"""The main file for the reconstruction.
This file should NOT be modified except the body of the 'run_reconstruction' function.
Students can call their functions (declared in others files of src/methods/your_name).
"""
import numpy as np
from src.forward_model import CFA
from src.methods.ramanantsitonta_harizo.fonctions import hamilton_adams #, SSD
def run_reconstruction(y: np.ndarray, cfa: str) -> np.ndarray:
"""Performs demosaicking on y.
Args:
y (np.ndarray): Mosaicked image to be reconstructed.
cfa (str): Name of the CFA. Can be bayer or quad_bayer.
Returns:
np.ndarray: Demosaicked image.
"""
# Performing the reconstruction.
# TODO
input_shape = (y.shape[0], y.shape[1], 3)
op = CFA(cfa, input_shape)
cfa_img = op.adjoint(y)
res = hamilton_adams(cfa_img, input_shape)
#res = SSD(res, cfa_img, input_shape)
return res
# 2023
# Authors: Mauro Dalla Mura and Matthieu Muller
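For completeness, a minimal usage sketch of the run_reconstruction entry point defined just above (the random array is only a placeholder for a real mosaicked image; the CFA class comes from the project's src.forward_model, as imported in the file itself).

import numpy as np

y = np.random.rand(64, 64)              # placeholder for a real Bayer mosaic
rgb = run_reconstruction(y, "bayer")    # demosaicked estimate
print(rgb.shape)                        # (64, 64, 3)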
File added