From b28979fe2e9040bf7711f1eb7e4dbfb029a5ecea Mon Sep 17 00:00:00 2001
From: Jean-Baptiste Keck <Jean-Baptiste.Keck@imag.fr>
Date: Tue, 22 May 2018 17:02:36 +0200
Subject: [PATCH] Add periodic bubble example and field integration (Integrate) operator

---
 examples/bubble/periodic_bubble.py            | 317 ++++++++++++++++++
 .../device/opencl/operator/integrate.py       |  37 ++
 hysop/core/memory/memory_request.py           |   2 +-
 hysop/defaults.py                             |   5 +-
 hysop/fields/default_fields.py                |  15 +
 hysop/operator/base/integrate.py              | 109 ++++++
 hysop/operator/base/min_max.py                |   3 +-
 hysop/operator/directional/diffusion_dir.py   |   4 +-
 hysop/operator/integrate.py                   | 100 ++++++
 hysop/operators.py                            |   1 +
 hysop/parameters/default_parameters.py        |  19 +-
 hysop/parameters/tensor_parameter.py          |   4 +
 12 files changed, 610 insertions(+), 6 deletions(-)
 create mode 100644 examples/bubble/periodic_bubble.py
 create mode 100644 hysop/backend/device/opencl/operator/integrate.py
 create mode 100644 hysop/operator/base/integrate.py
 create mode 100644 hysop/operator/integrate.py
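
A minimal usage sketch of the new Integrate operator, mirroring what
examples/bubble/periodic_bubble.py does inside compute(); rho, npts, impl and
extra_op_kwds are the variables defined in that example, not new API:

    from hysop.defaults import VolumicIntegrationParameter
    from hysop.operators import Integrate

    # Parameter 'rho_v' that receives the volume integral of rho.
    rhov = VolumicIntegrationParameter(field=rho)

    # scaling='normalize': the first computed integral is rescaled to 1.0,
    # later values are reported relative to it.
    integrate_rho = Integrate(name='integrate_rho', field=rho,
                              variables={rho: npts}, parameter=rhov,
                              scaling='normalize', implementation=impl,
                              **extra_op_kwds)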

diff --git a/examples/bubble/periodic_bubble.py b/examples/bubble/periodic_bubble.py
new file mode 100644
index 000000000..5c088b737
--- /dev/null
+++ b/examples/bubble/periodic_bubble.py
@@ -0,0 +1,317 @@
+
+## HySoP Example: Bubble nD
+## Osher1995 (first part): 
+##  A Level Set Formulation of Eulerian Interface Capturing Methods for 
+##  Incompressible Fluid flows.
+
+import os
+import numpy as np
+
+Bc = ((0.5,0.5,0.5),)
+Br = (0.25,)
+
+def bubble_mask(coords, Bc, Br):
+    assert len(Bc)==len(Br)>=1
+    dim = len(coords)
+    shape = tuple(c.size for c in coords[::-1])
+    mask = np.zeros(shape=shape, dtype=np.int8)
+    for (center, radius) in zip(Bc, Br):
+        center = center[:dim]
+        D = np.zeros(shape=shape, dtype=np.float64)
+        for (Xi,Ci) in zip(coords, center):
+            D += (Xi-Ci)**2
+        mask |= (D<=radius**2)
+    return mask
+
+def init_rho(data, coords, Bc, Br, rho1, rho2):
+    mask = bubble_mask(coords, Bc, Br)
+    data[0][...] = mask*rho1 + (1-mask)*rho2
+    
+def init_mu(data, coords, Bc, Br, mu1, mu2):
+    mask = bubble_mask(coords, Bc, Br)
+    data[0][...] = mask*mu1 + (1-mask)*mu2
+
+def init_vorticity(data, coords):
+    for d in data:
+        d[...] = 0.0
+
+def compute(args):
+    from hysop import Box, Simulation, Problem, MPIParams, IOParams
+    from hysop.defaults import VelocityField, VorticityField, \
+                               DensityField, ViscosityField, \
+                               EnstrophyParameter, TimeParameters, \
+                               VolumicIntegrationParameter
+    from hysop.constants import Implementation, AdvectionCriteria
+
+    from hysop.operators import DirectionalAdvection, DirectionalDiffusion,        \
+                                DirectionalStretchingDiffusion,                    \
+                                PoissonRotational, AdaptiveTimeStep,               \
+                                Enstrophy, MinMaxFieldStatistics, StrangSplitting, \
+                                ParameterPlotter, Integrate, HDF_Writer
+
+    from hysop.methods import SpaceDiscretization, Remesh, TimeIntegrator, \
+                              ComputeGranularity, Interpolation
+
+    # Define the domain
+    dim  = args.ndim
+    npts = args.npts
+    box  = Box(origin=args.box_origin, length=args.box_length, dim=dim)
+    
+    # Get default MPI Parameters from domain (even for serial jobs)
+    mpi_params = MPIParams(comm=box.task_comm,
+                           task_id=box.current_task())
+    
+    # Setup usual implementation specific variables
+    impl = args.impl
+    extra_op_kwds = {'mpi_params': mpi_params}
+    if (impl is Implementation.PYTHON):
+        method = {}
+    elif (impl is Implementation.OPENCL):
+        # For the OpenCL implementation we need to setup the compute device
+        # and configure how the code is generated and compiled at runtime.
+                
+        # Create an explicit OpenCL context from user parameters
+        from hysop.backend.device.opencl.opencl_tools import get_or_create_opencl_env
+        cl_env = get_or_create_opencl_env(mpi_params=mpi_params, 
+                                          platform_id=args.cl_platform_id, 
+                                          device_id=args.cl_device_id)
+        
+        # Configure OpenCL kernel generation and tuning (already done by HysopArgParser)
+        from hysop.methods import OpenClKernelConfig
+        method = { OpenClKernelConfig: args.opencl_kernel_config }
+        
+        # Setup opencl specific extra operator keyword arguments
+        extra_op_kwds['cl_env'] = cl_env
+    else:
+        msg='Unknown implementation \'{}\'.'.format(impl)
+        raise ValueError(msg)
+    
+    # Define parameters and fields (time, timestep, velocity, vorticity, density, viscosity, enstrophy, volume integrals)
+    t, dt = TimeParameters(dtype=args.dtype)
+    velo  = VelocityField(domain=box, dtype=args.dtype)
+    vorti = VorticityField(domain=box, dtype=args.dtype)
+    rho   = DensityField(domain=box, dtype=args.dtype) 
+    mu    = ViscosityField(domain=box, dtype=args.dtype, mu=True)
+
+    enstrophy = EnstrophyParameter(dtype=args.dtype)
+    rhov = VolumicIntegrationParameter(field=rho)
+    muv  = VolumicIntegrationParameter(field=mu)
+    
+    ### Build the directional operators
+    #> Directional advection 
+    advec = DirectionalAdvection(implementation=impl,
+            name='advec',
+            velocity = velo,       
+            advected_fields = (vorti, rho, mu),
+            velocity_cfl = args.cfl,
+            variables = {velo: npts, vorti: npts, rho: npts, mu: npts},
+            dt=dt, **extra_op_kwds)
+    #> Directional stretching + diffusion
+    if (dim==3):
+        stretch_diffuse = DirectionalStretchingDiffusion(implementation=impl,
+                 name='stretch_diffuse',
+                 formulation = args.stretching_formulation,
+                 viscosity = mu,
+                 velocity  = velo,       
+                 vorticity = vorti,
+                 variables = {velo: npts, vorti: npts, mu: npts},
+                 dt=dt, **extra_op_kwds)
+    elif (dim==2):
+        stretch_diffuse = DirectionalDiffusion(implementation=impl,
+                 name='stretch_diffuse',
+                 coeffs = mu,
+                 fields  = vorti,
+                 variables = {vorti: npts, mu: npts},
+                 dt=dt, **extra_op_kwds)
+    else:
+        msg='Unsupported dimension {}.'.format(dim)
+        raise RuntimeError(msg)
+
+    #> Directional splitting operator subgraph
+    splitting = StrangSplitting(splitting_dim=dim, order=args.strang_order)
+    splitting.push_operators(advec, stretch_diffuse)
+    
+    ### Build standard operators
+    #> Poisson operator to recover the velocity from the vorticity
+    poisson = PoissonRotational(name='poisson', velocity=velo, vorticity=vorti, 
+                            variables={velo:npts, vorti: npts}, 
+                            projection=args.reprojection_frequency,
+                            implementation=impl, **extra_op_kwds)
+
+    #> Operators to dump rho and mu
+    io_params = IOParams(filename='fields', frequency=args.dump_freq)
+    dump_fields = HDF_Writer(name='hdf_writer',
+                             io_params=io_params,
+                             variables={velo: npts, 
+                                        vorti: npts,    
+                                        rho: npts, 
+                                        mu: npts})
+    
+    #> Operator to compute the infinity norm of the velocity
+    min_max_U = MinMaxFieldStatistics(name='min_max_U', field=velo,
+            Finf=True, implementation=impl, variables={velo:npts},
+            **extra_op_kwds)
+    #> Operator to compute the infinity norm of the vorticity
+    min_max_W = MinMaxFieldStatistics(name='min_max_W', field=vorti,
+            Finf=True, implementation=impl, variables={vorti:npts},
+            **extra_op_kwds)
+    #> Operator to track min and max density and viscosity
+    min_max_rho = MinMaxFieldStatistics(name='min_max_rho', field=rho,
+            Fmin=True, Fmax=True,
+            implementation=impl, variables={rho:npts},
+            **extra_op_kwds)
+    min_max_mu = MinMaxFieldStatistics(name='min_max_mu', field=mu,
+            Fmin=True, Fmax=True,
+            implementation=impl, variables={mu:npts},
+            **extra_op_kwds)
+    #> Operators to compute the integrated enstrophy, density and viscosity
+    integrate_enstrophy = Enstrophy(name='enstrophy', vorticity=vorti, enstrophy=enstrophy,
+            variables={vorti:npts}, implementation=impl, **extra_op_kwds)
+    integrate_rho = Integrate(name='integrate_rho', field=rho, variables={rho: npts},
+                                    parameter=rhov, scaling='normalize',
+                                    implementation=impl, **extra_op_kwds)
+    integrate_mu = Integrate(name='integrate_mu', field=mu, variables={mu: npts},
+                                    parameter=muv, scaling='normalize',
+                                    implementation=impl, **extra_op_kwds)
+    
+
+    ### Adaptive timestep operator
+    adapt_dt = AdaptiveTimeStep(dt, equivalent_CFL=True, max_dt=0.1)
+    dt_cfl   = adapt_dt.push_cfl_criteria(cfl=args.cfl, Finf=min_max_U.Finf,
+                                            equivalent_CFL=True)
+    dt_advec = adapt_dt.push_advection_criteria(lcfl=args.lcfl, Finf=min_max_W.Finf,
+                                                    criteria=AdvectionCriteria.W_INF)
+    
+    ## Create the problem we want to solve and insert our 
+    # directional splitting subgraph and the standard operators.
+    # The method dictionary passed to this graph will be dispatched
+    # across all operators contained in the graph.
+    method.update(
+            { 
+               ComputeGranularity:    args.compute_granularity,
+               SpaceDiscretization:   args.fd_order,
+               TimeIntegrator:        args.time_integrator,
+               Remesh:                args.remesh_kernel,
+               Interpolation:         args.interpolation
+            }
+    )
+    problem = Problem(method=method)
+    problem.insert(poisson, 
+                   dump_fields,
+                   splitting, 
+                   # integrate_enstrophy, integrate_rho, integrate_mu,
+                   min_max_rho, min_max_mu, min_max_U, min_max_W, 
+                   adapt_dt)
+    problem.build()
+
+    # If a visu_rank was provided, and show_graph was set,
+    # display the graph on the given process rank.
+    if args.display_graph:
+        problem.display(args.visu_rank)
+    
+    # Create a simulation
+    # (do not forget to specify the t and dt parameters here)
+    simu = Simulation(start=args.tstart, end=args.tend, 
+                      nb_iter=args.nb_iter,
+                      max_iter=args.max_iter,
+                      dt0=args.dt, times_of_interest=args.dump_times,
+                      t=t, dt=dt)
+    simu.write_parameters(t, dt_cfl, dt_advec, dt, 
+            enstrophy, rhov, muv,
+            min_max_U.Finf, min_max_W.Finf, 
+            min_max_rho.Fmin, min_max_rho.Fmax,
+            min_max_mu.Fmin, min_max_mu.Fmax,
+            adapt_dt.equivalent_CFL,
+            filename='parameters.txt', precision=8)
+    
+    # Initialize the vorticity, density and viscosity fields
+    dfields = problem.input_discrete_fields
+    dfields[vorti].initialize(formula=init_vorticity)
+    dfields[rho].initialize(formula=init_rho, rho1=args.rho1, rho2=args.rho2, Bc=Bc, Br=Br)
+    dfields[mu].initialize(formula=init_mu, mu1=args.mu1, mu2=args.mu2, Bc=Bc, Br=Br)
+
+    # Finally solve the problem 
+    problem.solve(simu, dry_run=args.dry_run)
+    
+    # Finalize
+    problem.finalize()
+
+
+if __name__=='__main__':
+    from examples.example_utils import HysopArgParser, colors
+
+    class PeriodicBubbleArgParser(HysopArgParser):
+        def __init__(self):
+            prog_name = 'periodic_bubble'
+            default_dump_dir = '{}/hysop_examples/{}'.format(HysopArgParser.tmp_dir(), 
+                    prog_name)
+
+            description=colors.color('HySoP Periodic Bubble Example: ', fg='blue', style='bold')
+            description+=colors.color('[Osher 1995] (first part)', fg='yellow', style='bold')
+            description+=colors.color('\nA Level Set Formulation of Eulerian Interface Capturing Methods for '
+                    +'Incompressible Fluid flows.',
+                    fg='yellow')
+            description+='\n'
+            description+='\nThis example focuses on a validation study for the '
+            description+='hybrid particle-mesh vortex method for varying densities without using a levelset function.'
+    
+            super(PeriodicBubbleArgParser, self).__init__(
+                 prog_name=prog_name,
+                 description=description,
+                 default_dump_dir=default_dump_dir)
+
+        def _add_main_args(self):
+            args = super(PeriodicBubbleArgParser, self)._add_main_args()
+            args.add_argument('-rho1', '--bubble-density', type=float,
+                                dest='rho1',
+                                help='Set the density of the bubbles.')
+            args.add_argument('-rho2', '--fluid-density', type=float,
+                                dest='rho2',
+                                help='Set the density of the fluid surrounding the bubbles.')
+            args.add_argument('-mu1', '--bubble-viscosity', type=float,
+                                dest='mu1',
+                                help='Set the viscosity of the bubbles.')
+            args.add_argument('-mu2', '--fluid-viscosity', type=float,
+                                dest='mu2',
+                                help='Set the viscosity of the fluid surrounding the bubbles.')
+            return args
+
+        def _check_main_args(self, args):
+            super(PeriodicBubbleArgParser, self)._check_main_args(args)
+            vars_ = ('rho1', 'rho2', 'mu1', 'mu2')
+            self._check_default(args, vars_, float, allow_none=False)
+            self._check_positive(args, vars_, strict=False, allow_none=False)
+            
+        def _add_graphical_io_args(self):
+            graphical_io = super(PeriodicBubbleArgParser, self)._add_graphical_io_args()
+            graphical_io.add_argument('-pp', '--plot-parameters', action='store_true',
+                    dest='plot_parameters',
+                    help=('Plot the density and viscosity integrals during simulation. '+
+                         'Simulation will stop at each time of interest and '+
+                         'the plot will be updated every specified freq iterations.'))
+            graphical_io.add_argument('-pf', '--plot-freq', type=int, default=100, 
+                    dest='plot_freq',
+                    help='Plotting update frequency in terms of iterations.')
+        
+        def _check_file_io_args(self, args):
+            super(PeriodicBubbleArgParser, self)._check_file_io_args(args)
+            self._check_default(args, 'plot_parameters', bool, allow_none=False)
+            self._check_default(args, 'plot_freq', int, allow_none=False)
+            self._check_positive(args, 'plot_freq', strict=True, allow_none=False)
+            
+        def _setup_parameters(self, args):
+            if (args.ndim not in (2,3)):
+                msg='This example only works for 2D and 3D domains.'
+                self.error(msg)
+
+    parser = PeriodicBubbleArgParser()
+
+    parser.set_defaults(impl='cl', ndim=2, npts=(257,),
+                        box_origin=(0.0,), box_length=(1.0,), 
+                        tstart=0.0, tend=0.50, 
+                        dt=1e-5, cfl=0.5, lcfl=0.125,
+                        dump_freq=0, 
+                        dump_times=(0.0, 0.1, 0.20, 0.30, 0.325, 0.4, 0.45),
+                        rho1=1.0, rho2=10.0, mu1=0.00025, mu2=0.00050)
+
+    parser.run(compute)
diff --git a/hysop/backend/device/opencl/operator/integrate.py b/hysop/backend/device/opencl/operator/integrate.py
new file mode 100644
index 000000000..2e405db45
--- /dev/null
+++ b/hysop/backend/device/opencl/operator/integrate.py
@@ -0,0 +1,37 @@
+
+from hysop.tools.decorators import debug
+from hysop.backend.device.opencl.opencl_operator import OpenClOperator, op_apply
+from hysop.operator.base.integrate import IntegrateBase
+
+class OpenClIntegrate(IntegrateBase, OpenClOperator):
+
+    @debug
+    def __init__(self, **kwds):
+        super(OpenClIntegrate, self).__init__(**kwds)
+    
+    @debug
+    def get_field_requirements(self):
+        # force 0 ghosts for the reduction (pyopencl reduction kernel)
+        requirements = super(OpenClIntegrate, self).get_field_requirements()
+        topo, req = requirements.get_input_requirement(self.field)
+        req.max_ghosts = (0,)*self.field.dim
+        return requirements
+    
+    @debug
+    def setup(self, work):
+        super(OpenClIntegrate, self).setup(work)
+        self.sum_kernels = tuple(self.dF.backend.sum(a=self.dF[i], build_kernel_launcher=True, async=True)
+                                    for i in xrange(self.dF.nb_components))
+    
+    @op_apply
+    def apply(self, **kwds):
+        queue = self.cl_env.default_queue
+        evts = ()
+        for knl in self.sum_kernels:
+            evt = knl(queue=queue)
+            evts += (evt,)
+        for (i,evt) in enumerate(evts):
+            evt.wait()
+            Pi = self.sum_kernels[i].out.get()[0]
+            if (self.scaling_coeff[i] is None):
+                self.scaling_coeff[i] = 1.0 / Pi
+            self.parameter[i] = self.scaling_coeff[i] * Pi
diff --git a/hysop/core/memory/memory_request.py b/hysop/core/memory/memory_request.py
index 7121a7b5d..e4d8def1c 100644
--- a/hysop/core/memory/memory_request.py
+++ b/hysop/core/memory/memory_request.py
@@ -452,7 +452,7 @@ class MultipleOperatorMemoryRequests(object):
                     precision = ''
                 ss+= '\n {}{}:'.format(backend.full_tag, precision)
                 ss+= template.format(*titles, **sizes)
-                for op in sorted(backend_requests.keys(), key=lambda op: op.name):
+                for op in sorted(backend_srequests.keys(), key=lambda op: op.name):
                     sop_reqs = backend_srequests[op]
                     for sreq in sop_reqs:
                         ss+= template.format(*sreq, **sizes)
diff --git a/hysop/defaults.py b/hysop/defaults.py
index 66ff5e441..1265a58cf 100644
--- a/hysop/defaults.py
+++ b/hysop/defaults.py
@@ -1,4 +1,5 @@
 
-from hysop.fields.default_fields import VelocityField, VorticityField
+from hysop.fields.default_fields import VelocityField, VorticityField, \
+                                        DensityField, ViscosityField
 from hysop.parameters.default_parameters import TimeParameters, \
-        EnstrophyParameter, KineticEnergyParameter
+        EnstrophyParameter, KineticEnergyParameter, VolumicIntegrationParameter
diff --git a/hysop/fields/default_fields.py b/hysop/fields/default_fields.py
index b23a25d79..0eea67820 100644
--- a/hysop/fields/default_fields.py
+++ b/hysop/fields/default_fields.py
@@ -26,3 +26,18 @@ def VorticityField(domain, name=None, pretty_name=None,
     return Field(domain=domain, name=name, pretty_name=pretty_name, 
                     nb_components=nb_components, **kwds)
 
+
+def DensityField(domain, name=None, pretty_name=None, **kwds):
+    name        = first_not_None(name, 'rho')
+    pretty_name = first_not_None(pretty_name, greak[16])
+    return Field(domain=domain, name=name, pretty_name=pretty_name, **kwds)
+
+
+def ViscosityField(domain, name=None, pretty_name=None, mu=False, **kwds):
+    if mu:
+        name = first_not_None(name, 'mu')
+        pretty_name = first_not_None(pretty_name, greak[11])
+    else:
+        name = first_not_None(name, 'nu')
+        pretty_name = first_not_None(pretty_name, greak[12])
+    return Field(domain=domain, name=name, pretty_name=pretty_name, **kwds)
diff --git a/hysop/operator/base/integrate.py b/hysop/operator/base/integrate.py
new file mode 100644
index 000000000..212635fe8
--- /dev/null
+++ b/hysop/operator/base/integrate.py
@@ -0,0 +1,109 @@
+
+
+from abc import ABCMeta
+
+from hysop.tools.types       import check_instance, to_tuple, first_not_None
+from hysop.tools.decorators  import debug
+from hysop.tools.numpywrappers import npw
+from hysop.fields.continuous_field import Field
+
+from hysop.core.memory.memory_request import MemoryRequest
+from hysop.topology.cartesian_descriptor import CartesianTopologyDescriptors
+from hysop.parameters.scalar_parameter import ScalarParameter, TensorParameter
+from hysop.parameters.default_parameters import VolumicIntegrationParameter
+
+class IntegrateBase(object):
+    """
+    Common implementation interface for field integration.
+    """
+
+    __metaclass__ = ABCMeta
+
+    @debug
+    def __init__(self, field, variables, 
+                    parameter=None, scaling=None, **kwds):
+        """
+        Initialize an Integrate operator base.
+
+        Integrate a field on its compute domain and put the result in a parameter.
+
+        in:  field
+            Possibly multi-component field that should be integrated.
+        out: parameter
+             P = scaling * integral_V(field)
+             where V is the field domain volume
+             and scaling depends on the specified scaling method.
+
+        Parameters
+        ----------
+        field: Field
+            Input continuous field to be integrated.
+        variables: dict
+            Dictionary of fields as keys and topologies as values.
+        parameter: ScalarParameter or TensorParameter
+            The output parameter that will contain the integral.
+            Should match field.nb_components.
+            A default parameter will be created if not specified.
+        scaling: None, float, str or array-like of float, optional
+            Scaling method used after integration.
+            'volumic':   scale by domain size (product of mesh space steps)
+            'normalize': scale by first integration (first value will be 1.0)
+            Can also be a custom float value or a tuple of float values.
+            Defaults to volumic integration.
+        kwds:
+            Extra keyword arguments that will be passed towards the
+            implementation integrate operator __init__.
+        """
+        
+        check_instance(field, Field)
+        check_instance(variables, dict, keys=Field, values=CartesianTopologyDescriptors)
+
+        scaling = first_not_None(scaling, 'volumic')
+        if isinstance(scaling, float):
+            scaling = (scaling,)*field.nb_components
+        
+        # Generate parameter if not supplied.
+        if (parameter is None):
+            parameter = VolumicIntegrationParameter(field=field)
+        if (parameter.size != field.nb_components):
+            msg='Expected a parameter of size {} but got a parameter of size {}.'
+            msg=msg.format(field.nb_components, parameter.size)
+            raise RuntimeError(msg)
+        
+        check_instance(parameter, (ScalarParameter, TensorParameter))
+        check_instance(scaling, (str,tuple))
+        if isinstance(scaling, tuple):
+            check_instance(scaling, tuple, values=float, size=field.nb_components)
+        
+        input_fields  = { field: variables[field] }
+        output_params = { parameter.name: parameter }
+
+        self.field     = field
+        self.parameter = parameter
+        self.scaling   = scaling
+        self.scaling_coeff = None
+        
+        super(IntegrateBase, self).__init__(input_fields=input_fields, output_params=output_params, **kwds)
+    
+    @debug
+    def discretize(self):
+        if self.discretized:
+            return
+        super(IntegrateBase, self).discretize()
+        dF = self.input_discrete_fields[self.field]
+
+        scaling = self.scaling
+        if (scaling == 'volumic'):
+            scaling_coeff = npw.prod(dF.space_step) / npw.prod(dF.domain.length)
+            scaling_coeff = (scaling_coeff,)*dF.nb_components
+        elif (scaling == 'normalize'):
+            scaling_coeff = [None,]*dF.nb_components
+        elif isinstance(scaling, tuple):
+            scaling_coeff = tuple(scaling)
+        else:
+            msg='Unknown scaling method {}'.format(self.scaling)
+            raise ValueError(msg)
+
+        assert len(scaling_coeff)==dF.nb_components
+
+        self.dF = dF
+        self.scaling_coeff = scaling_coeff
+
diff --git a/hysop/operator/base/min_max.py b/hysop/operator/base/min_max.py
index 7aa698b1a..daabf3cfa 100644
--- a/hysop/operator/base/min_max.py
+++ b/hysop/operator/base/min_max.py
@@ -71,7 +71,8 @@ class MinMaxFieldStatisticsBase(object):
                 param = make_param(k, quiet=True)
             else:
                 param = None
-            assert npw.prod(param.shape) == nb_components
+            if (param is not None):
+                assert npw.prod(param.shape) == nb_components
             parameters[k] = param
         return parameters 
 
diff --git a/hysop/operator/directional/diffusion_dir.py b/hysop/operator/directional/diffusion_dir.py
index 2303d7a83..6c536345e 100644
--- a/hysop/operator/directional/diffusion_dir.py
+++ b/hysop/operator/directional/diffusion_dir.py
@@ -105,8 +105,10 @@ class DirectionalDiffusion(DirectionalSymbolic):
 
         if isinstance(coeffs, npw.ndarray):
             coeffs = (coeffs,)
-        if isinstance(coeffs, (Field, Parameter)):
+        elif isinstance(coeffs, Parameter):
             coeffs = (coeffs.s,)
+        elif isinstance(coeffs, Field):
+            coeffs = (coeffs.s(*coeffs.domain.frame.vars),)
 
         fields     = to_tuple(fields)
         coeffs     = to_tuple(coeffs)
diff --git a/hysop/operator/integrate.py b/hysop/operator/integrate.py
new file mode 100644
index 000000000..4c0a8e646
--- /dev/null
+++ b/hysop/operator/integrate.py
@@ -0,0 +1,100 @@
+
+
+"""
+@file integrate.py
+Field integration frontend.
+"""
+from hysop.constants         import Implementation
+from hysop.tools.types       import check_instance
+from hysop.tools.enum        import EnumFactory
+from hysop.tools.decorators  import debug
+from hysop.fields.continuous_field import Field
+from hysop.topology.cartesian_descriptor import CartesianTopologyDescriptors
+from hysop.core.graph.computational_node_frontend import ComputationalGraphNodeFrontend
+from hysop.parameters.scalar_parameter import ScalarParameter, TensorParameter
+from hysop.parameters.default_parameters import VolumicIntegrationParameter
+
+
+class Integrate(ComputationalGraphNodeFrontend):
+    """
+    Interface for integrating fields on their compute domain.
+    Available implementations are:
+        *OPENCL (GPU-based implementation)
+    """
+    
+    @classmethod
+    def implementations(cls):
+        from hysop.backend.device.opencl.operator.integrate import OpenClIntegrate
+        implementations = {
+                Implementation.OPENCL: OpenClIntegrate
+        }
+        return implementations
+    
+    @classmethod
+    def default_implementation(cls):
+        return Implementation.OPENCL
+    
+    @debug
+    def __init__(self, field, variables,
+                parameter=None, scaling=None,
+                implementation=None, base_kwds=None, **kwds):
+        """
+        Initialize an Integrate operator frontend.
+
+        Integrate a field on its compute domain and put the result in a parameter.
+
+        in:  field
+            Possibly multi-component field that should be integrated.
+        out: parameter
+             P = scaling * integral_V(field)
+             where V is the field domain volume
+             and scaling depends on the specified scaling method.
+
+        Parameters
+        ----------
+        field: Field
+            Input continuous field to be integrated.
+        variables: dict
+            Dictionary of fields as keys and topologies as values.
+        parameter: ScalarParameter or TensorParameter
+            The output parameter that will contain the integral.
+            Should match field.nb_components. 
+            A default parameter will be created if not specified.
+        scaling: None, float, str or array-like of str, optional
+            Scaling method used after integration.
+            'volumic':   scale by domain size (product of mesh space steps)
+            'normalize': scale by first integration (first value will be 1.0) 
+            Defaults to volumic integration.
+        implementation: Implementation, optional, defaults to None
+            target implementation, should be contained in available_implementations().
+            If None, implementation will be set to default_implementation().
+        base_kwds: dict, optional, defaults to None
+            Base class keyword arguments.
+            If None, an empty dict will be passed.
+        kwds:
+            Extra keyword arguments that will be passed towards the implementation
+            integrate operator __init__.
+
+        Notes
+        -----
+        An Integrate operator implementation should at least support 
+        the hysop.operator.base.integrate.IntegrateBase interface.
+        """
+        base_kwds = base_kwds or dict()
+        
+        check_instance(field, Field)
+        check_instance(variables, dict, keys=Field, values=CartesianTopologyDescriptors)
+        check_instance(parameter, (ScalarParameter, TensorParameter), allow_none=True)
+        check_instance(scaling, str, allow_none=True)
+        check_instance(base_kwds, dict, keys=str)
+        
+        # Pregenerate parameter so that we can directly store it in self.
+        if (parameter is None):
+            parameter = VolumicIntegrationParameter(field=field)
+        if (parameter.size != field.nb_components):
+            msg='Expected a parameter of size {} but got a parameter of size {}.'
+            msg=msg.format(field.nb_components, parameter.size)
+            raise RuntimeError(msg)
+        
+        super(Integrate, self).__init__(field=field, variables=variables,
+                parameter=parameter, scaling=scaling, base_kwds=base_kwds, **kwds)
diff --git a/hysop/operators.py b/hysop/operators.py
index 7e9935f72..53eb1881e 100644
--- a/hysop/operators.py
+++ b/hysop/operators.py
@@ -16,6 +16,7 @@ from hysop.operator.adapt_timestep    import AdaptiveTimeStep
 from hysop.operator.hdf_io            import HDF_Writer, HDF_Reader
 from hysop.operator.custom_symbolic   import CustomSymbolicOperator
 from hysop.operator.parameter_plotter import ParameterPlotter
+from hysop.operator.integrate         import Integrate
 
 from hysop.operator.derivative import SpaceDerivative,       \
                                       MultiSpaceDerivatives, \
diff --git a/hysop/parameters/default_parameters.py b/hysop/parameters/default_parameters.py
index 3bdb1cd5f..88fc81cf2 100644
--- a/hysop/parameters/default_parameters.py
+++ b/hysop/parameters/default_parameters.py
@@ -1,5 +1,7 @@
+from hysop.constants import HYSOP_REAL
 from hysop.tools.types import first_not_None
 from hysop.tools.sympy_utils import greak, Greak, subscripts
+from hysop.parameters.tensor_parameter import TensorParameter
 from hysop.parameters.scalar_parameter import ScalarParameter
 
 def TimeParameters(dtype, **kwds):
@@ -16,4 +18,19 @@ def KineticEnergyParameter(name=None, pretty_name=None, **kwds):
     name        = first_not_None(name, 'kinetic_energy')
     pretty_name = first_not_None(pretty_name, u'E\u2096')
     return ScalarParameter(name=name, pretty_name=pretty_name, **kwds)
-    
+
+def VolumicIntegrationParameter(name=None, pretty_name=None, field=None, dtype=None, **kwds):
+    if (field is not None):
+        pretty_name = first_not_None(pretty_name, name, field.pretty_name)
+        name        = first_not_None(name, field.name)
+        dtype       = first_not_None(dtype, field.dtype)
+    dtype = first_not_None(dtype, HYSOP_REAL)
+    name += '_v'
+    pretty_name += u'\u1d65'.encode('utf-8')
+    if (field.nb_components>1):
+        return TensorParameter(name=name, pretty_name=pretty_name, 
+                               shape=(field.nb_components,), 
+                               dtype=dtype, **kwds)
+    else:
+        return ScalarParameter(name=name, pretty_name=pretty_name, 
+                                dtype=dtype, **kwds)
diff --git a/hysop/parameters/tensor_parameter.py b/hysop/parameters/tensor_parameter.py
index 0e562fcaf..07c67973c 100644
--- a/hysop/parameters/tensor_parameter.py
+++ b/hysop/parameters/tensor_parameter.py
@@ -165,6 +165,9 @@ class TensorParameter(Parameter):
     def _get_shape(self):
         """Get parameter shape."""
         return self._value.shape
+    def _get_size(self):
+        """Get parameter size."""
+        return self._value.size
     def _get_dtype(self):
         """Get parameter dtype."""
         return self._value.dtype
@@ -253,6 +256,7 @@ TensorParameter[name={}, pname={}]
         return ss
 
     shape = property(_get_shape)
+    size = property(_get_size)
     dtype = property(_get_dtype)
     ctype = property(_get_ctype)
     min_value = property(_get_min_value)
-- 
GitLab